Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--  bitbake/lib/bb/cooker.py  2275
1 file changed, 0 insertions(+), 2275 deletions(-)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
deleted file mode 100644
index f4ab797edf..0000000000
--- a/bitbake/lib/bb/cooker.py
+++ /dev/null
@@ -1,2275 +0,0 @@
1#
2# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell
4# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
5# Copyright (C) 2005 Holger Hans Peter Freyther
6# Copyright (C) 2005 ROAD GmbH
7# Copyright (C) 2006 - 2007 Richard Purdie
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys, os, glob, os.path, re, time
13import itertools
14import logging
15import multiprocessing
16import sre_constants
17import threading
18from io import StringIO, UnsupportedOperation
19from contextlib import closing
20from collections import defaultdict, namedtuple
21import bb, bb.exceptions, bb.command
22from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
23import queue
24import signal
25import prserv.serv
26import pyinotify
27import json
28import pickle
29import codecs
30import hashserv
31
32logger = logging.getLogger("BitBake")
33collectlog = logging.getLogger("BitBake.Collection")
34buildlog = logging.getLogger("BitBake.Build")
35parselog = logging.getLogger("BitBake.Parsing")
36providerlog = logging.getLogger("BitBake.Provider")
37
38class NoSpecificMatch(bb.BBHandledException):
39 """
40 Exception raised when either no match or multiple matches are found for a file
41 """
42
43class NothingToBuild(Exception):
44 """
45 Exception raised when there is nothing to build
46 """
47
48class CollectionError(bb.BBHandledException):
49 """
50 Exception raised when layer configuration is incorrect
51 """
52
53class state:
54 initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
55
56 @classmethod
57 def get_name(cls, code):
58 for name in dir(cls):
59 value = getattr(cls, name)
60 if type(value) == type(cls.initial) and value == code:
61 return name
62 raise ValueError("Invalid status code: %s" % code)
63
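# For example, state.get_name(state.running) returns "running"; an unknown
# code raises ValueError.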
64
65class SkippedPackage:
66 def __init__(self, info = None, reason = None):
67 self.pn = None
68 self.skipreason = None
69 self.provides = None
70 self.rprovides = None
71
72 if info:
73 self.pn = info.pn
74 self.skipreason = info.skipreason
75 self.provides = info.provides
76 self.rprovides = info.packages + info.rprovides
77 for package in info.packages:
78 self.rprovides += info.rprovides_pkg[package]
79 elif reason:
80 self.skipreason = reason
81
82
83class CookerFeatures(object):
84 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
85
86 def __init__(self):
87 self._features=set()
88
89 def setFeature(self, f):
90 # validate we got a request for a feature we support
91 if f not in CookerFeatures._feature_list:
92 return
93 self._features.add(f)
94
95 def __contains__(self, f):
96 return f in self._features
97
98 def __iter__(self):
99 return self._features.__iter__()
100
101 def __next__(self):
102 return next(iter(self._features))
103
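# Illustrative usage (a sketch; nothing in this module calls it this way):
#   features = CookerFeatures()
#   features.setFeature(CookerFeatures.SEND_SANITYEVENTS)
#   assert CookerFeatures.SEND_SANITYEVENTS in features
# Note that setFeature() silently ignores unknown feature codes.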
104
105class EventWriter:
106 def __init__(self, cooker, eventfile):
107 self.file_inited = None
108 self.cooker = cooker
109 self.eventfile = eventfile
110 self.event_queue = []
111
112 def write_event(self, event):
113 with open(self.eventfile, "a") as f:
114 try:
115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
116 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
117 "vars": str_event}))
118 except Exception as err:
119 import traceback
120 print(err, traceback.format_exc())
121
122 def send(self, event):
123 if self.file_inited:
124 # we have the file, just write the event
125 self.write_event(event)
126 else:
127 # init on bb.event.BuildStarted
128 name = "%s.%s" % (event.__module__, event.__class__.__name__)
129 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
130 with open(self.eventfile, "w") as f:
131 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
132
133 self.file_inited = True
134
135 # write pending events
136 for evt in self.event_queue:
137 self.write_event(evt)
138
139 # also write the current event
140 self.write_event(event)
141 else:
142 # queue all events until the file is inited
143 self.event_queue.append(event)
144
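# A minimal sketch (not part of the original module) of how an event log
# written by EventWriter could be read back: the first line holds the
# variable dump, and every later line is a JSON object whose "vars" field
# carries a base64-encoded pickle of the event. The helper name is
# illustrative only.
def _example_read_eventlog(eventfile):
    events = []
    with open(eventfile) as f:
        for line in f:
            entry = json.loads(line)
            if "vars" in entry:
                raw = codecs.decode(entry["vars"].encode('utf-8'), 'base64')
                events.append(pickle.loads(raw))
    return events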
145#============================================================================#
146# BBCooker
147#============================================================================#
148class BBCooker:
149 """
150 Manages one bitbake build run
151 """
152
153 def __init__(self, featureSet=None, idleCallBackRegister=None):
154 self.recipecaches = None
155 self.eventlog = None
156 self.skiplist = {}
157 self.featureset = CookerFeatures()
158 if featureSet:
159 for f in featureSet:
160 self.featureset.setFeature(f)
161
162 self.configuration = bb.cookerdata.CookerConfiguration()
163
164 self.idleCallBackRegister = idleCallBackRegister
165
166 bb.debug(1, "BBCooker starting %s" % time.time())
167 sys.stdout.flush()
168
169 self.configwatcher = pyinotify.WatchManager()
170 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
171 sys.stdout.flush()
172
173 self.configwatcher.bbseen = set()
174 self.configwatcher.bbwatchedfiles = set()
175 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
176 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
177 sys.stdout.flush()
178 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
179 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
180 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
181 self.watcher = pyinotify.WatchManager()
182 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
183 sys.stdout.flush()
184 self.watcher.bbseen = set()
185 self.watcher.bbwatchedfiles = set()
186 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
187
188 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
189 sys.stdout.flush()
190
191 # If being called by something like tinfoil, we need to clean cached data
192 # which may now be invalid
193 bb.parse.clear_cache()
194 bb.parse.BBHandler.cached_statements = {}
195
196 self.ui_cmdline = None
197 self.hashserv = None
198 self.hashservaddr = None
199
200 self.inotify_modified_files = []
201
202 def _process_inotify_updates(server, cooker, abort):
203 cooker.process_inotify_updates()
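            # Returning a float (rather than True/False) asks the idle loop to
            # call this function again, roughly once a second in this case.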
204 return 1.0
205
206 self.idleCallBackRegister(_process_inotify_updates, self)
207
208 # TOSTOP must not be set or our children will hang when they output
209 try:
210 fd = sys.stdout.fileno()
211 if os.isatty(fd):
212 import termios
213 tcattr = termios.tcgetattr(fd)
214 if tcattr[3] & termios.TOSTOP:
215 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
216 tcattr[3] = tcattr[3] & ~termios.TOSTOP
217 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
218 except UnsupportedOperation:
219 pass
220
221 self.command = bb.command.Command(self)
222 self.state = state.initial
223
224 self.parser = None
225
226 signal.signal(signal.SIGTERM, self.sigterm_exception)
227 # Let SIGHUP exit as SIGTERM
228 signal.signal(signal.SIGHUP, self.sigterm_exception)
229
230 bb.debug(1, "BBCooker startup complete %s" % time.time())
231 sys.stdout.flush()
232
233 def init_configdata(self):
234 if not hasattr(self, "data"):
235 self.initConfigurationData()
236 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
237 sys.stdout.flush()
238 self.handlePRServ()
239
240 def process_inotify_updates(self):
241 for n in [self.confignotifier, self.notifier]:
242 if n.check_events(timeout=0):
243 # read notified events and enqueue them
244 n.read_events()
245 n.process_events()
246
247 def config_notifications(self, event):
248 if event.maskname == "IN_Q_OVERFLOW":
249 bb.warn("inotify event queue overflowed, invalidating caches.")
250 self.parsecache_valid = False
251 self.baseconfig_valid = False
252 bb.parse.clear_cache()
253 return
254 if not event.pathname in self.configwatcher.bbwatchedfiles:
255 return
256 if not event.pathname in self.inotify_modified_files:
257 self.inotify_modified_files.append(event.pathname)
258 self.baseconfig_valid = False
259
260 def notifications(self, event):
261 if event.maskname == "IN_Q_OVERFLOW":
262 bb.warn("inotify event queue overflowed, invalidating caches.")
263 self.parsecache_valid = False
264 bb.parse.clear_cache()
265 return
266 if event.pathname.endswith("bitbake-cookerdaemon.log") \
267 or event.pathname.endswith("bitbake.lock"):
268 return
269 if not event.pathname in self.inotify_modified_files:
270 self.inotify_modified_files.append(event.pathname)
271 self.parsecache_valid = False
272
273 def add_filewatch(self, deps, watcher=None, dirs=False):
274 if not watcher:
275 watcher = self.watcher
276 for i in deps:
277 watcher.bbwatchedfiles.add(i[0])
278 if dirs:
279 f = i[0]
280 else:
281 f = os.path.dirname(i[0])
282 if f in watcher.bbseen:
283 continue
284 watcher.bbseen.add(f)
285 watchtarget = None
286 while True:
287 # We try to add watches for files that don't exist but which, if they did,
288 # would influence the parser. The parent directory of these files may not exist,
289 # in which case we need to watch the closest parent that does exist for changes.
290 try:
291 watcher.add_watch(f, self.watchmask, quiet=False)
292 if watchtarget:
293 watcher.bbwatchedfiles.add(watchtarget)
294 break
295 except pyinotify.WatchManagerError as e:
296 if 'ENOENT' in str(e):
297 watchtarget = f
298 f = os.path.dirname(f)
299 if f in watcher.bbseen:
300 break
301 watcher.bbseen.add(f)
302 continue
303 if 'ENOSPC' in str(e):
304 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
305 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
306 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
307 providerlog.error("Root privilege is required to modify max_user_watches.")
308 raise
309
310 def sigterm_exception(self, signum, stackframe):
311 if signum == signal.SIGTERM:
312 bb.warn("Cooker received SIGTERM, shutting down...")
313 elif signum == signal.SIGHUP:
314 bb.warn("Cooker received SIGHUP, shutting down...")
315 self.state = state.forceshutdown
316
317 def setFeatures(self, features):
318 # we only accept a new feature set if we're in state initial, so we can reset without problems
319 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
320 raise Exception("Illegal state for feature set change")
321 original_featureset = list(self.featureset)
322 for feature in features:
323 self.featureset.setFeature(feature)
324 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
325 if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
326 self.reset()
327
328 def initConfigurationData(self):
329
330 self.state = state.initial
331 self.caches_array = []
332
333 # Need to preserve BB_CONSOLELOG over resets
334 consolelog = None
335 if hasattr(self, "data"):
336 consolelog = self.data.getVar("BB_CONSOLELOG")
337
338 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
339 self.enableDataTracking()
340
341 all_extra_cache_names = []
342 # We hardcode all known cache types in a single place, here.
343 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
344 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
345
346 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
347
348 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
349 # This is the entry point, no further check needed!
350 for var in caches_name_array:
351 try:
352 module_name, cache_name = var.split(':')
353 module = __import__(module_name, fromlist=(cache_name,))
354 self.caches_array.append(getattr(module, cache_name))
355 except ImportError as exc:
356 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
357 raise bb.BBHandledException()
358
359 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
360 self.databuilder.parseBaseConfiguration()
361 self.data = self.databuilder.data
362 self.data_hash = self.databuilder.data_hash
363 self.extraconfigdata = {}
364
365 if consolelog:
366 self.data.setVar("BB_CONSOLELOG", consolelog)
367
368 self.data.setVar('BB_CMDLINE', self.ui_cmdline)
369
370 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
371 self.disableDataTracking()
372
373 for mc in self.databuilder.mcdata.values():
374 mc.renameVar("__depends", "__base_depends")
375 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
376
377 self.baseconfig_valid = True
378 self.parsecache_valid = False
379
380 def handlePRServ(self):
381 # Setup a PR Server based on the new configuration
382 try:
383 self.prhost = prserv.serv.auto_start(self.data)
384 except prserv.serv.PRServiceConfigError as e:
385 bb.fatal("Unable to start PR Server, exitting")
386
387 if self.data.getVar("BB_HASHSERVE") == "auto":
388 # Create a new hash server bound to a unix domain socket
389 if not self.hashserv:
390 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
391 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
392 self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
393 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
394 self.hashserv.process.start()
395 self.data.setVar("BB_HASHSERVE", self.hashservaddr)
396 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
397 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
398 for mc in self.databuilder.mcdata:
399 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
400
401 bb.parse.init_parser(self.data)
402
403 def enableDataTracking(self):
404 self.configuration.tracking = True
405 if hasattr(self, "data"):
406 self.data.enableTracking()
407
408 def disableDataTracking(self):
409 self.configuration.tracking = False
410 if hasattr(self, "data"):
411 self.data.disableTracking()
412
413 def parseConfiguration(self):
414 self.updateCacheSync()
415
416 # Change nice level if we're asked to
417 nice = self.data.getVar("BB_NICE_LEVEL")
418 if nice:
419 curnice = os.nice(0)
420 nice = int(nice) - curnice
421 buildlog.verbose("Renice to %s " % os.nice(nice))
422
423 if self.recipecaches:
424 del self.recipecaches
425 self.multiconfigs = self.databuilder.mcdata.keys()
426 self.recipecaches = {}
427 for mc in self.multiconfigs:
428 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
429
430 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
431
432 self.parsecache_valid = False
433
434 def updateConfigOpts(self, options, environment, cmdline):
435 self.ui_cmdline = cmdline
436 clean = True
437 for o in options:
438 if o in ['prefile', 'postfile']:
439 # Only these options may require a reparse
440 try:
441 if getattr(self.configuration, o) == options[o]:
442 # Value is the same, no need to mark dirty
443 continue
444 except AttributeError:
445 pass
446 logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
447 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
448 clean = False
449 if hasattr(self.configuration, o):
450 setattr(self.configuration, o, options[o])
451
452 if self.configuration.writeeventlog:
453 if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog:
454 bb.event.unregister_UIHhandler(self.eventlog[1])
455 if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
456 # we log all events to a file if so directed
457 # register the log file writer as UI Handler
458 writer = EventWriter(self, self.configuration.writeeventlog)
459 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
460 self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
461
462 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
463 bb.msg.loggerDefaultDomains = self.configuration.debug_domains
464
465 if hasattr(self, "data"):
466 origenv = bb.data.init()
467 for k in environment:
468 origenv.setVar(k, environment[k])
469 self.data.setVar("BB_ORIGENV", origenv)
470
471 for k in bb.utils.approved_variables():
472 if k in environment and k not in self.configuration.env:
473 logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
474 self.configuration.env[k] = environment[k]
475 clean = False
476 if k in self.configuration.env and k not in environment:
477 logger.debug("Updating environment variable %s (deleted)" % (k))
478 del self.configuration.env[k]
479 clean = False
480 if k not in self.configuration.env and k not in environment:
481 continue
482 if environment[k] != self.configuration.env[k]:
483 logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
484 self.configuration.env[k] = environment[k]
485 clean = False
486
487 # Now update all the variables not in the datastore to match
488 self.configuration.env = environment
489
490 if not clean:
491 logger.debug("Base environment change, triggering reparse")
492 self.reset()
493
494 def runCommands(self, server, data, abort):
495 """
496 Run any queued asynchronous command
497 This is done by the idle handler so it runs in true context rather than
498 tied to any UI.
499 """
500
501 return self.command.runAsyncCommand()
502
503 def showVersions(self):
504
505 (latest_versions, preferred_versions) = self.findProviders()
506
507 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
508 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
509
510 for p in sorted(self.recipecaches[''].pkg_pn):
511 pref = preferred_versions[p]
512 latest = latest_versions[p]
513
514 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
515 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
516
517 if pref == latest:
518 prefstr = ""
519
520 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
521
522 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
523 """
524 Show the outer or per-recipe environment
525 """
526 fn = None
527 envdata = None
528 mc = ''
529 if not pkgs_to_build:
530 pkgs_to_build = []
531
532 orig_tracking = self.configuration.tracking
533 if not orig_tracking:
534 self.enableDataTracking()
535 self.reset()
536
537 def mc_base(p):
538 if p.startswith('mc:'):
539 s = p.split(':')
540 if len(s) == 2:
541 return s[1]
542 return None
543
544 if buildfile:
545 # Parse the configuration here. We need to do it explicitly here since
546 # this showEnvironment() code path doesn't use the cache
547 self.parseConfiguration()
548
549 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
550 fn = self.matchFile(fn, mc)
551 fn = bb.cache.realfn2virtual(fn, cls, mc)
552 elif len(pkgs_to_build) == 1:
553 mc = mc_base(pkgs_to_build[0])
554 if not mc:
555 ignore = self.data.getVar("ASSUME_PROVIDED") or ""
556 if pkgs_to_build[0] in set(ignore.split()):
557 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
558
559 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
560
561 mc = runlist[0][0]
562 fn = runlist[0][3]
563
564 if fn:
565 try:
566 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
567 envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
568 except Exception as e:
569 parselog.exception("Unable to read %s", fn)
570 raise
571 else:
572 if mc not in self.databuilder.mcdata:
573 bb.fatal('No multiconfig named "%s" found' % mc)
574 envdata = self.databuilder.mcdata[mc]
575 data.expandKeys(envdata)
576 parse.ast.runAnonFuncs(envdata)
577
578 # Display history
579 with closing(StringIO()) as env:
580 self.data.inchistory.emit(env)
581 logger.plain(env.getvalue())
582
583 # emit variables and shell functions
584 with closing(StringIO()) as env:
585 data.emit_env(env, envdata, True)
586 logger.plain(env.getvalue())
587
588 # emit the metadata which isn't valid shell
589 for e in sorted(envdata.keys()):
590 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
591 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
592
593 if not orig_tracking:
594 self.disableDataTracking()
595 self.reset()
596
597 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
598 """
599 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
600 """
601 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
602
603 # A task of None means use the default task
604 if task is None:
605 task = self.configuration.cmd
606 if not task.startswith("do_"):
607 task = "do_%s" % task
608
609 targetlist = self.checkPackages(pkgs_to_build, task)
610 fulltargetlist = []
611 defaulttask_implicit = ''
612 defaulttask_explicit = False
613 wildcard = False
614
615 # Wildcard expansion:
616 # expand a string such as "mc:*:bash"
617 # into "mc:A:bash mc:B:bash bash"
618 for k in targetlist:
619 if k.startswith("mc:") and k.count(':') >= 2:
620 if wildcard:
621 bb.fatal('Multiconfig wildcard target cannot be combined with other multiconfig targets')
622 if k.split(":")[1] == "*":
623 wildcard = True
624 for mc in self.multiconfigs:
625 if mc:
626 fulltargetlist.append(k.replace('*', mc))
627 # implicit default task
628 else:
629 defaulttask_implicit = k.split(":")[2]
630 else:
631 fulltargetlist.append(k)
632 else:
633 defaulttask_explicit = True
634 fulltargetlist.append(k)
635
636 if not defaulttask_explicit and defaulttask_implicit != '':
637 fulltargetlist.append(defaulttask_implicit)
638
639 bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
640 taskdata = {}
641 localdata = {}
642
643 for mc in self.multiconfigs:
644 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
645 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
646 bb.data.expandKeys(localdata[mc])
647
648 current = 0
649 runlist = []
650 for k in fulltargetlist:
651 origk = k
652 mc = ""
653 if k.startswith("mc:") and k.count(':') >= 2:
654 mc = k.split(":")[1]
655 k = ":".join(k.split(":")[2:])
656 ktask = task
657 if ":do_" in k:
658 k2 = k.split(":do_")
659 k = k2[0]
660 ktask = k2[1]
661
662 if mc not in self.multiconfigs:
663 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))
664
665 taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
666 current += 1
667 if not ktask.startswith("do_"):
668 ktask = "do_%s" % ktask
669 if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
670 # e.g. in ASSUME_PROVIDED
671 continue
672 fn = taskdata[mc].build_targets[k][0]
673 runlist.append([mc, k, ktask, fn])
674 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
675
676 havemc = False
677 for mc in self.multiconfigs:
678 if taskdata[mc].get_mcdepends():
679 havemc = True
680
681 # No need to check providers if there are no mcdeps or this is not an mc build
682 if havemc or len(self.multiconfigs) > 1:
683 seen = set()
684 new = True
685 # Make sure we can provide the multiconfig dependency
686 while new:
687 mcdeps = set()
688 # Add unresolved first, so we can pick up indirect multiconfig dependencies in time
689 for mc in self.multiconfigs:
690 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
691 mcdeps |= set(taskdata[mc].get_mcdepends())
692 new = False
693 for mc in self.multiconfigs:
694 for k in mcdeps:
695 if k in seen:
696 continue
697 l = k.split(':')
698 depmc = l[2]
699 if depmc not in self.multiconfigs:
700 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
701 else:
702 logger.debug("Adding providers for multiconfig dependency %s" % l[3])
703 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
704 seen.add(k)
705 new = True
706
707 for mc in self.multiconfigs:
708 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
709
710 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
711 return taskdata, runlist
712
713 def prepareTreeData(self, pkgs_to_build, task):
714 """
715 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
716 """
717
718 # We set abort to False here to prevent unbuildable targets raising
719 # an exception when we're just generating data
720 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
721
722 return runlist, taskdata
723
724 ######## WARNING : this function requires cache_extra to be enabled ########
725
726 def generateTaskDepTreeData(self, pkgs_to_build, task):
727 """
728 Create a dependency graph of pkgs_to_build including reverse dependency
729 information.
730 """
731 if not task.startswith("do_"):
732 task = "do_%s" % task
733
734 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
735 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
736 rq.rqdata.prepare()
737 return self.buildDependTree(rq, taskdata)
738
739 @staticmethod
740 def add_mc_prefix(mc, pn):
741 if mc:
742 return "mc:%s:%s" % (mc, pn)
743 return pn
744
745 def buildDependTree(self, rq, taskdata):
746 seen_fns = []
747 depend_tree = {}
748 depend_tree["depends"] = {}
749 depend_tree["tdepends"] = {}
750 depend_tree["pn"] = {}
751 depend_tree["rdepends-pn"] = {}
752 depend_tree["packages"] = {}
753 depend_tree["rdepends-pkg"] = {}
754 depend_tree["rrecs-pkg"] = {}
755 depend_tree['providermap'] = {}
756 depend_tree["layer-priorities"] = self.bbfile_config_priorities
757
758 for mc in taskdata:
759 for name, fn in list(taskdata[mc].get_providermap().items()):
760 pn = self.recipecaches[mc].pkg_fn[fn]
761 pn = self.add_mc_prefix(mc, pn)
762 if name != pn:
763 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
764 depend_tree['providermap'][name] = (pn, version)
765
766 for tid in rq.rqdata.runtaskentries:
767 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
768 pn = self.recipecaches[mc].pkg_fn[taskfn]
769 pn = self.add_mc_prefix(mc, pn)
770 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
771 if pn not in depend_tree["pn"]:
772 depend_tree["pn"][pn] = {}
773 depend_tree["pn"][pn]["filename"] = taskfn
774 depend_tree["pn"][pn]["version"] = version
775 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
776
777 # if we have extra caches, list all attributes they bring in
778 extra_info = []
779 for cache_class in self.caches_array:
780 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
781 cachefields = getattr(cache_class, 'cachefields', [])
782 extra_info = extra_info + cachefields
783
784 # for all attributes stored, add them to the dependency tree
785 for ei in extra_info:
786 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
787
788
789 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
790 if not dotname in depend_tree["tdepends"]:
791 depend_tree["tdepends"][dotname] = []
792 for dep in rq.rqdata.runtaskentries[tid].depends:
793 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
794 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
795 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
796 if taskfn not in seen_fns:
797 seen_fns.append(taskfn)
798 packages = []
799
800 depend_tree["depends"][pn] = []
801 for dep in taskdata[mc].depids[taskfn]:
802 depend_tree["depends"][pn].append(dep)
803
804 depend_tree["rdepends-pn"][pn] = []
805 for rdep in taskdata[mc].rdepids[taskfn]:
806 depend_tree["rdepends-pn"][pn].append(rdep)
807
808 rdepends = self.recipecaches[mc].rundeps[taskfn]
809 for package in rdepends:
810 depend_tree["rdepends-pkg"][package] = []
811 for rdepend in rdepends[package]:
812 depend_tree["rdepends-pkg"][package].append(rdepend)
813 packages.append(package)
814
815 rrecs = self.recipecaches[mc].runrecs[taskfn]
816 for package in rrecs:
817 depend_tree["rrecs-pkg"][package] = []
818 for rdepend in rrecs[package]:
819 depend_tree["rrecs-pkg"][package].append(rdepend)
820 if not package in packages:
821 packages.append(package)
822
823 for package in packages:
824 if package not in depend_tree["packages"]:
825 depend_tree["packages"][package] = {}
826 depend_tree["packages"][package]["pn"] = pn
827 depend_tree["packages"][package]["filename"] = taskfn
828 depend_tree["packages"][package]["version"] = version
829
830 return depend_tree
831
832 ######## WARNING : this function requires cache_extra to be enabled ########
833 def generatePkgDepTreeData(self, pkgs_to_build, task):
834 """
835 Create a dependency tree of pkgs_to_build, returning the data.
836 """
837 if not task.startswith("do_"):
838 task = "do_%s" % task
839
840 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
841
842 seen_fns = []
843 depend_tree = {}
844 depend_tree["depends"] = {}
845 depend_tree["pn"] = {}
846 depend_tree["rdepends-pn"] = {}
847 depend_tree["rdepends-pkg"] = {}
848 depend_tree["rrecs-pkg"] = {}
849
850 # if we have extra caches, list all attributes they bring in
851 extra_info = []
852 for cache_class in self.caches_array:
853 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
854 cachefields = getattr(cache_class, 'cachefields', [])
855 extra_info = extra_info + cachefields
856
857 tids = []
858 for mc in taskdata:
859 for tid in taskdata[mc].taskentries:
860 tids.append(tid)
861
862 for tid in tids:
863 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
864
865 pn = self.recipecaches[mc].pkg_fn[taskfn]
866 pn = self.add_mc_prefix(mc, pn)
867
868 if pn not in depend_tree["pn"]:
869 depend_tree["pn"][pn] = {}
870 depend_tree["pn"][pn]["filename"] = taskfn
871 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
872 depend_tree["pn"][pn]["version"] = version
873 rdepends = self.recipecaches[mc].rundeps[taskfn]
874 rrecs = self.recipecaches[mc].runrecs[taskfn]
875 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
876
877 # for all extra attributes stored, add them to the dependency tree
878 for ei in extra_info:
879 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
880
881 if taskfn not in seen_fns:
882 seen_fns.append(taskfn)
883
884 depend_tree["depends"][pn] = []
885 for dep in taskdata[mc].depids[taskfn]:
886 pn_provider = ""
887 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
888 fn_provider = taskdata[mc].build_targets[dep][0]
889 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
890 else:
891 pn_provider = dep
892 pn_provider = self.add_mc_prefix(mc, pn_provider)
893 depend_tree["depends"][pn].append(pn_provider)
894
895 depend_tree["rdepends-pn"][pn] = []
896 for rdep in taskdata[mc].rdepids[taskfn]:
897 pn_rprovider = ""
898 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
899 fn_rprovider = taskdata[mc].run_targets[rdep][0]
900 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
901 else:
902 pn_rprovider = rdep
903 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
904 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
905
906 depend_tree["rdepends-pkg"].update(rdepends)
907 depend_tree["rrecs-pkg"].update(rrecs)
908
909 return depend_tree
910
911 def generateDepTreeEvent(self, pkgs_to_build, task):
912 """
913 Create a task dependency graph of pkgs_to_build.
914 Generate an event with the result
915 """
916 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
917 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
918
919 def generateDotGraphFiles(self, pkgs_to_build, task):
920 """
921 Create a task dependency graph of pkgs_to_build.
922 Save the result to a set of .dot files.
923 """
924
925 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
926
927 with open('pn-buildlist', 'w') as f:
928 for pn in depgraph["pn"]:
929 f.write(pn + "\n")
930 logger.info("PN build list saved to 'pn-buildlist'")
931
932 # Remove old format output files to ensure no confusion with stale data
933 try:
934 os.unlink('pn-depends.dot')
935 except FileNotFoundError:
936 pass
937 try:
938 os.unlink('package-depends.dot')
939 except FileNotFoundError:
940 pass
941 try:
942 os.unlink('recipe-depends.dot')
943 except FileNotFoundError:
944 pass
945
946 with open('task-depends.dot', 'w') as f:
947 f.write("digraph depends {\n")
948 for task in sorted(depgraph["tdepends"]):
949 (pn, taskname) = task.rsplit(".", 1)
950 fn = depgraph["pn"][pn]["filename"]
951 version = depgraph["pn"][pn]["version"]
952 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
953 for dep in sorted(depgraph["tdepends"][task]):
954 f.write('"%s" -> "%s"\n' % (task, dep))
955 f.write("}\n")
956 logger.info("Task dependencies saved to 'task-depends.dot'")
957
958 def show_appends_with_no_recipes(self):
959 appends_without_recipes = {}
960 # Determine which bbappends haven't been applied
961 for mc in self.multiconfigs:
962 # First get list of recipes, including skipped
963 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
964 recipefns.extend(self.skiplist.keys())
965
966 # Work out list of bbappends that have been applied
967 applied_appends = []
968 for fn in recipefns:
969 applied_appends.extend(self.collections[mc].get_file_appends(fn))
970
971 appends_without_recipes[mc] = []
972 for _, appendfn in self.collections[mc].bbappends:
973 if not appendfn in applied_appends:
974 appends_without_recipes[mc].append(appendfn)
975
976 msgs = []
977 for mc in sorted(appends_without_recipes.keys()):
978 if appends_without_recipes[mc]:
979 msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default',
980 '\n '.join(appends_without_recipes[mc])))
981
982 if msgs:
983 msg = "\n".join(msgs)
984 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
985 False) or "no"
986 if warn_only.lower() in ("1", "yes", "true"):
987 bb.warn(msg)
988 else:
989 bb.fatal(msg)
990
991 def handlePrefProviders(self):
992
993 for mc in self.multiconfigs:
994 localdata = data.createCopy(self.databuilder.mcdata[mc])
995 bb.data.expandKeys(localdata)
996
997 # Handle PREFERRED_PROVIDERS
998 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
999 try:
1000 (providee, provider) = p.split(':')
1001 except:
1002 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
1003 continue
1004 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
1005 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
1006 self.recipecaches[mc].preferred[providee] = provider
1007
1008 def findConfigFilePath(self, configfile):
1009 """
1010 Find the location on disk of configfile and, if it exists and was parsed by
1011 BitBake, emit the ConfigFilePathFound event with the path to the file.
1012 """
1013 path = bb.cookerdata.findConfigFile(configfile, self.data)
1014 if not path:
1015 return
1016
1017 # Generate a list of parsed configuration files by searching the files
1018 # listed in the __depends and __base_depends variables with a .conf suffix.
1019 conffiles = []
1020 dep_files = self.data.getVar('__base_depends', False) or []
1021 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
1022
1023 for f in dep_files:
1024 if f[0].endswith(".conf"):
1025 conffiles.append(f[0])
1026
1027 _, conf, conffile = path.rpartition("conf/")
1028 match = os.path.join(conf, conffile)
1029 # Try and find matches for conf/conffilename.conf as we don't always
1030 # have the full path to the file.
1031 for cfg in conffiles:
1032 if cfg.endswith(match):
1033 bb.event.fire(bb.event.ConfigFilePathFound(path),
1034 self.data)
1035 break
1036
1037 def findFilesMatchingInDir(self, filepattern, directory):
1038 """
1039 Searches for files containing the substring 'filepattern' which are children of
1040 'directory' in each BBPATH, e.g. to find all rootfs package classes available
1041 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1042 or to find all machine configuration files one could call:
1043 findFilesMatchingInDir(self, '.conf', 'conf/machine')
1044 """
1045
1046 matches = []
1047 bbpaths = self.data.getVar('BBPATH').split(':')
1048 for path in bbpaths:
1049 dirpath = os.path.join(path, directory)
1050 if os.path.exists(dirpath):
1051 for root, dirs, files in os.walk(dirpath):
1052 for f in files:
1053 if filepattern in f:
1054 matches.append(f)
1055
1056 if matches:
1057 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1058
1059 def findProviders(self, mc=''):
1060 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1061
1062 def findBestProvider(self, pn, mc=''):
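        # Returns a 4-tuple matching the fallback below: the last element is
        # the chosen provider filename, and the first three (version
        # information) are only populated via bb.providers.findBestProvider()
        # when pn is a known recipe name rather than just a provides entry.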
1063 if pn in self.recipecaches[mc].providers:
1064 filenames = self.recipecaches[mc].providers[pn]
1065 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1066 filename = eligible[0]
1067 return None, None, None, filename
1068 elif pn in self.recipecaches[mc].pkg_pn:
1069 return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1070 else:
1071 return None, None, None, None
1072
1073 def findConfigFiles(self, varname):
1074 """
1075 Find config files which are appropriate values for varname.
1076 e.g. MACHINE, DISTRO
1077 """
1078 possible = []
1079 var = varname.lower()
1080
1081 data = self.data
1082 # iterate configs
1083 bbpaths = data.getVar('BBPATH').split(':')
1084 for path in bbpaths:
1085 confpath = os.path.join(path, "conf", var)
1086 if os.path.exists(confpath):
1087 for root, dirs, files in os.walk(confpath):
1088 # get all child files, these are appropriate values
1089 for f in files:
1090 val, sep, end = f.rpartition('.')
1091 if end == 'conf':
1092 possible.append(val)
1093
1094 if possible:
1095 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1096
1097 def findInheritsClass(self, klass):
1098 """
1099 Find all recipes which inherit the specified class
1100 """
1101 pkg_list = []
1102
1103 for pfn in self.recipecaches[''].pkg_fn:
1104 inherits = self.recipecaches[''].inherits.get(pfn, None)
1105 if inherits and klass in inherits:
1106 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
1107
1108 return pkg_list
1109
1110 def generateTargetsTree(self, klass=None, pkgs=None):
1111 """
1112 Generate a dependency tree of buildable targets
1113 Generate an event with the result
1114 """
1115 # if the caller hasn't specified a pkgs list default to universe
1116 if not pkgs:
1117 pkgs = ['universe']
1118 # if inherited_class passed ensure all recipes which inherit the
1119 # specified class are included in pkgs
1120 if klass:
1121 extra_pkgs = self.findInheritsClass(klass)
1122 pkgs = pkgs + extra_pkgs
1123
1124 # generate a dependency tree for all our packages
1125 tree = self.generatePkgDepTreeData(pkgs, 'build')
1126 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1127
1128 def interactiveMode( self ):
1129 """Drop off into a shell"""
1130 try:
1131 from bb import shell
1132 except ImportError:
1133 parselog.exception("Interactive mode not available")
1134 raise bb.BBHandledException()
1135 else:
1136 shell.start( self )
1137
1138
1139 def handleCollections(self, collections):
1140 """Handle collections"""
1141 errors = False
1142 self.bbfile_config_priorities = []
1143 if collections:
1144 collection_priorities = {}
1145 collection_depends = {}
1146 collection_list = collections.split()
1147 min_prio = 0
1148 for c in collection_list:
1149 bb.debug(1,'Processing %s in collection list' % (c))
1150
1151 # Get collection priority if defined explicitly
1152 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
1153 if priority:
1154 try:
1155 prio = int(priority)
1156 except ValueError:
1157 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1158 errors = True
1159 if min_prio == 0 or prio < min_prio:
1160 min_prio = prio
1161 collection_priorities[c] = prio
1162 else:
1163 collection_priorities[c] = None
1164
1165 # Check dependencies and store information for priority calculation
1166 deps = self.data.getVar("LAYERDEPENDS_%s" % c)
1167 if deps:
1168 try:
1169 depDict = bb.utils.explode_dep_versions2(deps)
1170 except bb.utils.VersionStringException as vse:
1171 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1172 for dep, oplist in list(depDict.items()):
1173 if dep in collection_list:
1174 for opstr in oplist:
1175 layerver = self.data.getVar("LAYERVERSION_%s" % dep)
1176 (op, depver) = opstr.split()
1177 if layerver:
1178 try:
1179 res = bb.utils.vercmp_string_op(layerver, depver, op)
1180 except bb.utils.VersionStringException as vse:
1181 bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
1182 if not res:
1183 parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
1184 errors = True
1185 else:
1186 parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
1187 errors = True
1188 else:
1189 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1190 errors = True
1191 collection_depends[c] = list(depDict.keys())
1192 else:
1193 collection_depends[c] = []
1194
1195 # Check recommends and store information for priority calculation
1196 recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
1197 if recs:
1198 try:
1199 recDict = bb.utils.explode_dep_versions2(recs)
1200 except bb.utils.VersionStringException as vse:
1201 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1202 for rec, oplist in list(recDict.items()):
1203 if rec in collection_list:
1204 if oplist:
1205 opstr = oplist[0]
1206 layerver = self.data.getVar("LAYERVERSION_%s" % rec)
1207 if layerver:
1208 (op, recver) = opstr.split()
1209 try:
1210 res = bb.utils.vercmp_string_op(layerver, recver, op)
1211 except bb.utils.VersionStringException as vse:
1212 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1213 if not res:
1214 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1215 continue
1216 else:
1217 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1218 continue
1219 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1220 collection_depends[c].append(rec)
1221 else:
1222 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1223
1224 # Recursively work out collection priorities based on dependencies
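            # For example, with BBFILE_PRIORITY_B = "5" and layer A depending
            # only on B (and no explicit priority of its own), A is assigned
            # priority 6, one above its highest-priority dependency.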
1225 def calc_layer_priority(collection):
1226 if not collection_priorities[collection]:
1227 max_depprio = min_prio
1228 for dep in collection_depends[collection]:
1229 calc_layer_priority(dep)
1230 depprio = collection_priorities[dep]
1231 if depprio > max_depprio:
1232 max_depprio = depprio
1233 max_depprio += 1
1234 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1235 collection_priorities[collection] = max_depprio
1236
1237 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1238 for c in collection_list:
1239 calc_layer_priority(c)
1240 regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
1241 if regex is None:
1242 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1243 errors = True
1244 continue
1245 elif regex == "":
1246 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
1247 cre = re.compile('^NULL$')
1248 errors = False
1249 else:
1250 try:
1251 cre = re.compile(regex)
1252 except re.error:
1253 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1254 errors = True
1255 continue
1256 self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1257 if errors:
1258 # We've already printed the actual error(s)
1259 raise CollectionError("Errors during parsing layer configuration")
1260
1261 def buildSetVars(self):
1262 """
1263 Setup any variables needed before starting a build
1264 """
1265 t = time.gmtime()
1266 for mc in self.databuilder.mcdata:
1267 ds = self.databuilder.mcdata[mc]
1268 if not ds.getVar("BUILDNAME", False):
1269 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1270 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1271 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1272 ds.setVar("TIME", time.strftime('%H%M%S', t))
1273
1274 def reset_mtime_caches(self):
1275 """
1276 Reset mtime caches - this is particularly important when memory resident as something
1277 which is cached may well have changed since the last invocation (e.g. a
1278 file associated with a recipe might have been modified by the user).
1279 """
1280 build.reset_cache()
1281 bb.fetch._checksum_cache.mtime_cache.clear()
1282 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1283 if siggen_cache:
1284 bb.parse.siggen.checksum_cache.mtime_cache.clear()
1285
1286 def matchFiles(self, bf, mc=''):
1287 """
1288 Find the .bb files which match the expression in 'buildfile'.
1289 """
1290 if bf.startswith("/") or bf.startswith("../"):
1291 bf = os.path.abspath(bf)
1292
1293 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1294 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1295 try:
1296 os.stat(bf)
1297 bf = os.path.abspath(bf)
1298 return [bf]
1299 except OSError:
1300 regexp = re.compile(bf)
1301 matches = []
1302 for f in filelist:
1303 if regexp.search(f) and os.path.isfile(f):
1304 matches.append(f)
1305 return matches
1306
1307 def matchFile(self, buildfile, mc=''):
1308 """
1309 Find the .bb file which matches the expression in 'buildfile'.
1310 Raise an error if multiple files
1311 """
1312 matches = self.matchFiles(buildfile, mc)
1313 if len(matches) != 1:
1314 if matches:
1315 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1316 for f in matches:
1317 msg += "\n %s" % f
1318 parselog.error(msg)
1320 else:
1321 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1322 raise NoSpecificMatch
1323 return matches[0]
1324
1325 def buildFile(self, buildfile, task):
1326 """
1327 Build the file matching regexp buildfile
1328 """
1329 bb.event.fire(bb.event.BuildInit(), self.data)
1330
1331 # Too many people use -b because they think it's how you normally
1332 # specify a target to be built, so show a warning
1333 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1334
1335 self.buildFileInternal(buildfile, task)
1336
1337 def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
1338 """
1339 Build the file matching regexp buildfile
1340 """
1341
1342 # Parse the configuration here. We need to do it explicitly here since
1343 # buildFile() doesn't use the cache
1344 self.parseConfiguration()
1345
1346 # If we are told to do the None task then query the default task
1347 if task is None:
1348 task = self.configuration.cmd
1349 if not task.startswith("do_"):
1350 task = "do_%s" % task
1351
1352 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
1353 fn = self.matchFile(fn, mc)
1354
1355 self.buildSetVars()
1356 self.reset_mtime_caches()
1357
1358 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
1359
1360 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
1361 infos = dict(infos)
1362
1363 fn = bb.cache.realfn2virtual(fn, cls, mc)
1364 try:
1365 info_array = infos[fn]
1366 except KeyError:
1367 bb.fatal("%s does not exist" % fn)
1368
1369 if info_array[0].skipped:
1370 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1371
1372 self.recipecaches[mc].add_from_recipeinfo(fn, info_array)
1373
1374 # Tweak some variables
1375 item = info_array[0].pn
1376 self.recipecaches[mc].ignored_dependencies = set()
1377 self.recipecaches[mc].bbfile_priority[fn] = 1
1378 self.configuration.limited_deps = True
1379
1380 # Remove external dependencies
1381 self.recipecaches[mc].task_deps[fn]['depends'] = {}
1382 self.recipecaches[mc].deps[fn] = []
1383 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1384 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1385
1386 # Invalidate task for target if force mode active
1387 if self.configuration.force:
1388 logger.verbose("Invalidate task %s, %s", task, fn)
1389 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)
1390
1391 # Setup taskdata structure
1392 taskdata = {}
1393 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
1394 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1395
1396 if quietlog:
1397 rqloglevel = bb.runqueue.logger.getEffectiveLevel()
1398 bb.runqueue.logger.setLevel(logging.WARNING)
1399
1400 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1401 if fireevents:
1402 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1403
1404 # Execute the runqueue
1405 runlist = [[mc, item, task, fn]]
1406
1407 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1408
1409 def buildFileIdle(server, rq, abort):
1410
1411 msg = None
1412 interrupted = 0
1413 if abort or self.state == state.forceshutdown:
1414 rq.finish_runqueue(True)
1415 msg = "Forced shutdown"
1416 interrupted = 2
1417 elif self.state == state.shutdown:
1418 rq.finish_runqueue(False)
1419 msg = "Stopped build"
1420 interrupted = 1
1421 failures = 0
1422 try:
1423 retval = rq.execute_runqueue()
1424 except runqueue.TaskFailure as exc:
1425 failures += len(exc.args)
1426 retval = False
1427 except SystemExit as exc:
1428 self.command.finishAsyncCommand(str(exc))
1429 if quietlog:
1430 bb.runqueue.logger.setLevel(rqloglevel)
1431 return False
1432
1433 if not retval:
1434 if fireevents:
1435 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1436 self.command.finishAsyncCommand(msg)
1437 # We trashed self.recipecaches above
1438 self.parsecache_valid = False
1439 self.configuration.limited_deps = False
1440 bb.parse.siggen.reset(self.data)
1441 if quietlog:
1442 bb.runqueue.logger.setLevel(rqloglevel)
1443 return False
1444 if retval is True:
1445 return True
1446 return retval
1447
1448 self.idleCallBackRegister(buildFileIdle, rq)
1449
1450 def buildTargets(self, targets, task):
1451 """
1452 Attempt to build the targets specified
1453 """
1454
1455 def buildTargetsIdle(server, rq, abort):
1456 msg = None
1457 interrupted = 0
1458 if abort or self.state == state.forceshutdown:
1459 rq.finish_runqueue(True)
1460 msg = "Forced shutdown"
1461 interrupted = 2
1462 elif self.state == state.shutdown:
1463 rq.finish_runqueue(False)
1464 msg = "Stopped build"
1465 interrupted = 1
1466 failures = 0
1467 try:
1468 retval = rq.execute_runqueue()
1469 except runqueue.TaskFailure as exc:
1470 failures += len(exc.args)
1471 retval = False
1472 except SystemExit as exc:
1473 self.command.finishAsyncCommand(str(exc))
1474 return False
1475
1476 if not retval:
1477 try:
1478 for mc in self.multiconfigs:
1479 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1480 finally:
1481 self.command.finishAsyncCommand(msg)
1482 return False
1483 if retval is True:
1484 return True
1485 return retval
1486
1487 self.reset_mtime_caches()
1488 self.buildSetVars()
1489
1490 # If we are told to do the None task then query the default task
1491 if task is None:
1492 task = self.configuration.cmd
1493
1494 if not task.startswith("do_"):
1495 task = "do_%s" % task
1496
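        # Give every bare target an explicit task suffix, e.g. "bash" becomes
        # "bash:do_build" when task is "do_build".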
1497 packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]
1498
1499 bb.event.fire(bb.event.BuildInit(packages), self.data)
1500
1501 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)
1502
1503 buildname = self.data.getVar("BUILDNAME", False)
1504
1505 # make targets always look like <target>:do_<task>
1506 ntargets = []
1507 for target in runlist:
1508 if target[0]:
1509 ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
1510 ntargets.append("%s:%s" % (target[1], target[2]))
1511
1512 for mc in self.multiconfigs:
1513 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1514
1515 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1516 if 'universe' in targets:
1517 rq.rqdata.warn_multi_bb = True
1518
1519 self.idleCallBackRegister(buildTargetsIdle, rq)
1520
1521
1522 def getAllKeysWithFlags(self, flaglist):
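        # Dump every non-internal variable together with its value, history
        # and the requested flags; EventWriter.send() uses this with
        # ["doc", "func"] when initialising the event log file.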
1523 dump = {}
1524 for k in self.data.keys():
1525 try:
1526 expand = True
1527 flags = self.data.getVarFlags(k)
1528 if flags and "func" in flags and "python" in flags:
1529 expand = False
1530 v = self.data.getVar(k, expand)
1531 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1532 dump[k] = {
1533 'v' : str(v) ,
1534 'history' : self.data.varhistory.variable(k),
1535 }
1536 for d in flaglist:
1537 if flags and d in flags:
1538 dump[k][d] = flags[d]
1539 else:
1540 dump[k][d] = None
1541 except Exception as e:
1542 print(e)
1543 return dump
1544
1545
1546 def updateCacheSync(self):
1547 if self.state == state.running:
1548 return
1549
1550 # reload files for which we got notifications
1551 for p in self.inotify_modified_files:
1552 bb.parse.update_cache(p)
1553 if p in bb.parse.BBHandler.cached_statements:
1554 del bb.parse.BBHandler.cached_statements[p]
1555 self.inotify_modified_files = []
1556
1557 if not self.baseconfig_valid:
1558 logger.debug("Reloading base configuration data")
1559 self.initConfigurationData()
1560 self.handlePRServ()
1561
1562 # This is called for all async commands when self.state != running
1563 def updateCache(self):
1564 if self.state == state.running:
1565 return
1566
1567 if self.state in (state.shutdown, state.forceshutdown, state.error):
1568 if hasattr(self.parser, 'shutdown'):
1569 self.parser.shutdown(clean=False, force = True)
1570 self.parser.final_cleanup()
1571 raise bb.BBHandledException()
1572
1573 if self.state != state.parsing:
1574 self.updateCacheSync()
1575
1576 if self.state != state.parsing and not self.parsecache_valid:
1577 bb.parse.siggen.reset(self.data)
1578 self.parseConfiguration()
1579 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1580 for mc in self.multiconfigs:
1581 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
1582
1583 for mc in self.multiconfigs:
1584 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
1585 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
1586
1587 for dep in self.configuration.extra_assume_provided:
1588 self.recipecaches[mc].ignored_dependencies.add(dep)
1589
1590 self.collections = {}
1591
1592 mcfilelist = {}
1593 total_masked = 0
1594 searchdirs = set()
1595 for mc in self.multiconfigs:
1596 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1597 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1598
1599 mcfilelist[mc] = filelist
1600 total_masked += masked
1601 searchdirs |= set(search)
1602
1603 # Add inotify watches for directories searched for bb/bbappend files
1604 for dirent in searchdirs:
1605 self.add_filewatch([[dirent]], dirs=True)
1606
1607 self.parser = CookerParser(self, mcfilelist, total_masked)
1608 self.parsecache_valid = True
1609
1610 self.state = state.parsing
1611
1612 if not self.parser.parse_next():
1613 collectlog.debug(1, "parsing complete")
1614 if self.parser.error:
1615 raise bb.BBHandledException()
1616 self.show_appends_with_no_recipes()
1617 self.handlePrefProviders()
1618 for mc in self.multiconfigs:
1619 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1620 self.state = state.running
1621
1622 # Send an event listing all stamps reachable after parsing
1623 # which the metadata may use to clean up stale data
1624 for mc in self.multiconfigs:
1625 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1626 bb.event.fire(event, self.databuilder.mcdata[mc])
1627 return None
1628
1629 return True
1630
1631 def checkPackages(self, pkgs_to_build, task=None):
1632
1633 # Return a copy, don't modify the original
1634 pkgs_to_build = pkgs_to_build[:]
1635
1636 if len(pkgs_to_build) == 0:
1637 raise NothingToBuild
1638
1639 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1640 for pkg in pkgs_to_build.copy():
1641 if pkg in ignore:
1642 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1643 if pkg.startswith("multiconfig:"):
1644 pkgs_to_build.remove(pkg)
1645 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
1646
1647 if 'world' in pkgs_to_build:
1648 pkgs_to_build.remove('world')
1649 for mc in self.multiconfigs:
1650 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1651 for t in self.recipecaches[mc].world_target:
1652 if mc:
1653 t = "mc:" + mc + ":" + t
1654 pkgs_to_build.append(t)
1655
1656 if 'universe' in pkgs_to_build:
1657 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1658 parselog.debug(1, "collating packages for \"universe\"")
1659 pkgs_to_build.remove('universe')
1660 for mc in self.multiconfigs:
1661 for t in self.recipecaches[mc].universe_target:
1662 if task:
1663 foundtask = False
1664 for provider_fn in self.recipecaches[mc].providers[t]:
1665 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1666 foundtask = True
1667 break
1668 if not foundtask:
1669 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1670 continue
1671 if mc:
1672 t = "mc:" + mc + ":" + t
1673 pkgs_to_build.append(t)
1674
1675 return pkgs_to_build
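# Worked example (hypothetical recipe names): with multiconfigs "" and
# "musl", checkPackages(["world"], "build") drops "world" and appends one
# entry per world target, e.g. "zlib" and "mc:musl:zlib". Targets listed in
# ASSUME_PROVIDED only produce a warning and stay in the list.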
1676
1677 def pre_serve(self):
1678 return
1679
1680 def post_serve(self):
1681 self.shutdown(force=True)
1682 prserv.serv.auto_shutdown()
1683 if self.hashserv:
1684 self.hashserv.process.terminate()
1685 self.hashserv.process.join()
1686 if hasattr(self, "data"):
1687 bb.event.fire(CookerExit(), self.data)
1688
1689 def shutdown(self, force=False):
1690 if force:
1691 self.state = state.forceshutdown
1692 else:
1693 self.state = state.shutdown
1694
1695 if self.parser:
1696 self.parser.shutdown(clean=not force, force=force)
1697 self.parser.final_cleanup()
1698
1699 def finishcommand(self):
1700 self.state = state.initial
1701
1702 def reset(self):
1703 self.initConfigurationData()
1704 self.handlePRServ()
1705
1706 def clientComplete(self):
1707 """Called when the client is done using the server"""
1708 self.finishcommand()
1709 self.extraconfigdata = {}
1710 self.command.reset()
1711 if hasattr(self, "data"):
1712 self.databuilder.reset()
1713 self.data = self.databuilder.data
1714 self.parsecache_valid = False
1715 self.baseconfig_valid = False
1716
1717
1718class CookerExit(bb.event.Event):
1719 """
1720 Notify clients of the Cooker shutdown
1721 """
1722
1723 def __init__(self):
1724 bb.event.Event.__init__(self)
1725
1726
1727class CookerCollectFiles(object):
1728 def __init__(self, priorities, mc=''):
1729 self.mc = mc
1730 self.bbappends = []
1731 # Priorities is a list of tuples, with the second element as the pattern.
1732 # We need to sort the list with the longest pattern first, and so on to
1733 # the shortest. This allows nested layers to be properly evaluated.
1734 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1735
1736 def calc_bbfile_priority(self, filename):
1737 for _, _, regex, pri in self.bbfile_config_priorities:
1738 if regex.match(filename):
1739 return pri, regex
1740 return 0, None
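# Illustration with hypothetical layer paths: because patterns are sorted
# longest first, a recipe under /srv/layers/meta-outer/meta-inner/ matches
# the meta-inner regex before the enclosing meta-outer one, so nested
# layers get their own priority.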
1741
1742 def get_bbfiles(self):
1743 """Get list of default .bb files by reading out the current directory"""
1744 path = os.getcwd()
1745 contents = os.listdir(path)
1746 bbfiles = []
1747 for f in contents:
1748 if f.endswith(".bb"):
1749 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1750 return bbfiles
1751
1752 def find_bbfiles(self, path):
1753 """Find all the .bb and .bbappend files in a directory"""
1754 found = []
1755 for dir, dirs, files in os.walk(path):
1756 for ignored in ('SCCS', 'CVS', '.svn'):
1757 if ignored in dirs:
1758 dirs.remove(ignored)
1759 found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1760
1761 return found
1762
1763 def collect_bbfiles(self, config, eventdata):
1764 """Collect all available .bb build files"""
1765 masked = 0
1766
1767 collectlog.debug(1, "collecting .bb files")
1768
1769 files = (config.getVar("BBFILES") or "").split()
1770
1771 # Sort files by priority
1772 files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
1773 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1774
1775 if not len(files):
1776 files = self.get_bbfiles()
1777
1778 if not len(files):
1779 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1780 bb.event.fire(CookerExit(), eventdata)
1781
1782 # We need to track where we look so that we can add inotify watches. There
1783 # is no nice way to do this; it is horrid. We intercept the os.listdir()
1784 # (and os.scandir(), where available) calls while we run glob().
1785 origlistdir = os.listdir
1786 if hasattr(os, 'scandir'):
1787 origscandir = os.scandir
1788 searchdirs = []
1789
1790 def ourlistdir(d):
1791 searchdirs.append(d)
1792 return origlistdir(d)
1793
1794 def ourscandir(d):
1795 searchdirs.append(d)
1796 return origscandir(d)
1797
1798 os.listdir = ourlistdir
1799 if hasattr(os, 'scandir'):
1800 os.scandir = ourscandir
1801 try:
1802 # Can't use set here as order is important
1803 newfiles = []
1804 for f in files:
1805 if os.path.isdir(f):
1806 dirfiles = self.find_bbfiles(f)
1807 for g in dirfiles:
1808 if g not in newfiles:
1809 newfiles.append(g)
1810 else:
1811 globbed = glob.glob(f)
1812 if not globbed and os.path.exists(f):
1813 globbed = [f]
1814 # glob gives files in order on disk. Sort to be deterministic.
1815 for g in sorted(globbed):
1816 if g not in newfiles:
1817 newfiles.append(g)
1818 finally:
1819 os.listdir = origlistdir
1820 if hasattr(os, 'scandir'):
1821 os.scandir = origscandir
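# Illustrative standalone sketch of the interception trick used above, with
# hypothetical names: wrapping os.scandir records every directory the glob
# machinery visits, which is what feeds the inotify watch list.
#
#     import glob, os
#
#     visited = []
#     orig_scandir = os.scandir
#
#     def spy_scandir(path='.'):
#         visited.append(path)       # remember the directory for watching
#         return orig_scandir(path)
#
#     os.scandir = spy_scandir
#     try:
#         glob.glob('/srv/layers/**/*.bb', recursive=True)
#     finally:
#         os.scandir = orig_scandir  # always restore the real function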
1822
1823 bbmask = config.getVar('BBMASK')
1824
1825 if bbmask:
1826 # First validate the individual regular expressions and ignore any
1827 # that do not compile
1828 bbmasks = []
1829 for mask in bbmask.split():
1830 # When constructing an older style single regex, it's possible for BBMASK
1831 # to end up beginning with '|', which matches and masks _everything_.
1832 if mask.startswith("|"):
1833 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1834 mask = mask[1:]
1835 try:
1836 re.compile(mask)
1837 bbmasks.append(mask)
1838 except sre_constants.error:
1839 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1840
1841 # Then validate the combined regular expressions. This should never
1842 # fail, but better safe than sorry...
1843 bbmask = "|".join(bbmasks)
1844 try:
1845 bbmask_compiled = re.compile(bbmask)
1846 except sre_constants.error:
1847 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1848 bbmask = None
1849
1850 bbfiles = []
1851 bbappend = []
1852 for f in newfiles:
1853 if bbmask and bbmask_compiled.search(f):
1854 collectlog.debug(1, "skipping masked file %s", f)
1855 masked += 1
1856 continue
1857 if f.endswith('.bb'):
1858 bbfiles.append(f)
1859 elif f.endswith('.bbappend'):
1860 bbappend.append(f)
1861 else:
1862 collectlog.debug(1, "skipping %s: unknown file extension", f)
1863
1864 # Build a list of .bbappend files for each .bb file
1865 for f in bbappend:
1866 base = os.path.basename(f).replace('.bbappend', '.bb')
1867 self.bbappends.append((base, f))
1868
1869 # Find overlayed recipes
1870 # bbfiles will be in priority order which makes this easy
1871 bbfile_seen = dict()
1872 self.overlayed = defaultdict(list)
1873 for f in reversed(bbfiles):
1874 base = os.path.basename(f)
1875 if base not in bbfile_seen:
1876 bbfile_seen[base] = f
1877 else:
1878 topfile = bbfile_seen[base]
1879 self.overlayed[topfile].append(f)
1880
1881 return (bbfiles, masked, searchdirs)
1882
1883 def get_file_appends(self, fn):
1884 """
1885 Returns the .bbappend files to apply to fn, as a tuple
1886 """
1887 filelist = []
1888 f = os.path.basename(fn)
1889 for b in self.bbappends:
1890 (bbappend, filename) = b
1891 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1892 filelist.append(filename)
1893 return tuple(filelist)
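# Wildcard example (hypothetical versions): a bbappend recorded with base
# name "busybox_%.bb" applies to "busybox_1.31.0.bb", since everything
# before the '%' ("busybox_") matches the start of the recipe file name.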
1894
1895 def collection_priorities(self, pkgfns, fns, d):
1896 # Return the priorities of the entries in pkgfns
1897 # Also check that all the regexes in self.bbfile_config_priorities are used
1898 # (but to do that we need to ensure skipped recipes aren't counted, nor
1899 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
1900
1901 priorities = {}
1902 seen = set()
1903 matched = set()
1904
1905 matched_regex = set()
1906 unmatched_regex = set()
1907 for _, _, regex, _ in self.bbfile_config_priorities:
1908 unmatched_regex.add(regex)
1909
1910 # Calculate priorities for each file
1911 for p in pkgfns:
1912 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1913 priorities[p], regex = self.calc_bbfile_priority(realfn)
1914 if regex in unmatched_regex:
1915 matched_regex.add(regex)
1916 unmatched_regex.remove(regex)
1917 seen.add(realfn)
1918 if regex:
1919 matched.add(realfn)
1920
1921 if unmatched_regex:
1922 # Account for bbappend files
1923 for b in self.bbappends:
1924 (bbfile, append) = b
1925 seen.add(append)
1926
1927 # Account for skipped recipes
1928 seen.update(fns)
1929
1930 seen.difference_update(matched)
1931
1932 def already_matched(fn):
1933 for regex in matched_regex:
1934 if regex.match(fn):
1935 return True
1936 return False
1937
1938 for unmatch in unmatched_regex.copy():
1939 for fn in seen:
1940 if unmatch.match(fn):
1941 # If the bbappend or file was already matched by another regex, skip it
1942 # e.g. for a layer within a layer, the outer regex could match, the inner
1943 # regex may match nothing and we should warn about that
1944 if already_matched(fn):
1945 continue
1946 unmatched_regex.remove(unmatch)
1947 break
1948
1949 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1950 if regex in unmatched_regex:
1951 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
1952 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1953 collection, pattern))
1954
1955 return priorities
1956
1957class ParsingFailure(Exception):
1958 def __init__(self, realexception, recipe):
1959 self.realexception = realexception
1960 self.recipe = recipe
1961 Exception.__init__(self, realexception, recipe)
1962
1963class Parser(multiprocessing.Process):
1964 def __init__(self, jobs, results, quit, init, profile):
1965 self.jobs = jobs
1966 self.results = results
1967 self.quit = quit
1968 self.init = init
1969 multiprocessing.Process.__init__(self)
1970 self.context = bb.utils.get_context().copy()
1971 self.handlers = bb.event.get_class_handlers().copy()
1972 self.profile = profile
1973
1974 def run(self):
1975
1976 if not self.profile:
1977 self.realrun()
1978 return
1979
1980 try:
1981 import cProfile as profile
1982 except ImportError:
1983 import profile
1984 prof = profile.Profile()
1985 try:
1986 profile.Profile.runcall(prof, self.realrun)
1987 finally:
1988 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1989 prof.dump_stats(logfile)
1990
1991 def realrun(self):
1992 if self.init:
1993 self.init()
1994
1995 pending = []
1996 while True:
1997 try:
1998 self.quit.get_nowait()
1999 except queue.Empty:
2000 pass
2001 else:
2002 self.results.close()
2003 self.results.join_thread()
2004 break
2005
2006 if pending:
2007 result = pending.pop()
2008 else:
2009 try:
2010 job = self.jobs.pop()
2011 except IndexError:
2012 self.results.close()
2013 self.results.join_thread()
2014 break
2015 result = self.parse(*job)
2016 # Clear the siggen cache after parsing to control memory usage; it is huge
2017 bb.parse.siggen.postparsing_clean_cache()
2018 try:
2019 self.results.put(result, timeout=0.25)
2020 except queue.Full:
2021 pending.append(result)
2022
2023 def parse(self, mc, cache, filename, appends):
2024 try:
2025 origfilter = bb.event.LogHandler.filter
2026 # Record the filename we're parsing into any events generated
2027 def parse_filter(self, record):
2028 record.taskpid = bb.event.worker_pid
2029 record.fn = filename
2030 return True
2031
2032 # Reset our environment and handlers to the original settings
2033 bb.utils.set_context(self.context.copy())
2034 bb.event.set_class_handlers(self.handlers.copy())
2035 bb.event.LogHandler.filter = parse_filter
2036
2037 return True, mc, cache.parse(filename, appends)
2038 except Exception as exc:
2039 tb = sys.exc_info()[2]
2040 exc.recipe = filename
2041 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2042 return True, exc
2043 # Need to turn BaseExceptions into Exceptions here so that we shut down
2044 # gracefully and a worker thread doesn't just exit on its own in response
2045 # to, for example, a SystemExit event.
2046 except BaseException as exc:
2047 return True, ParsingFailure(exc, filename)
2048 finally:
2049 bb.event.LogHandler.filter = origfilter
2050
2051class CookerParser(object):
2052 def __init__(self, cooker, mcfilelist, masked):
2053 self.mcfilelist = mcfilelist
2054 self.cooker = cooker
2055 self.cfgdata = cooker.data
2056 self.cfghash = cooker.data_hash
2057 self.cfgbuilder = cooker.databuilder
2058
2059 # Accounting statistics
2060 self.parsed = 0
2061 self.cached = 0
2062 self.error = 0
2063 self.masked = masked
2064
2065 self.skipped = 0
2066 self.virtuals = 0
2067
2068 self.current = 0
2069 self.process_names = []
2070
2071 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2072 self.fromcache = set()
2073 self.willparse = set()
2074 for mc in self.cooker.multiconfigs:
2075 for filename in self.mcfilelist[mc]:
2076 appends = self.cooker.collections[mc].get_file_appends(filename)
2077 if not self.bb_caches[mc].cacheValid(filename, appends):
2078 self.willparse.add((mc, self.bb_caches[mc], filename, appends))
2079 else:
2080 self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
2081
2082 self.total = len(self.fromcache) + len(self.willparse)
2083 self.toparse = len(self.willparse)
2084 self.progress_chunk = int(max(self.toparse / 100, 1))
2085
2086 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2087 multiprocessing.cpu_count()), self.toparse)
2088
2089 self.start()
2090 self.haveshutdown = False
2091 self.syncthread = None
2092
2093 def start(self):
2094 self.results = self.load_cached()
2095 self.processes = []
2096 if self.toparse:
2097 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2098 def init():
2099 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2100 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2101 signal.signal(signal.SIGINT, signal.SIG_IGN)
2102 bb.utils.set_process_name(multiprocessing.current_process().name)
2103 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2104 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2105
2106 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2107 self.result_queue = multiprocessing.Queue()
2108
2109 def chunkify(lst, n):
2110 return [lst[i::n] for i in range(n)]
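# e.g. chunkify([f1, f2, f3, f4, f5], 2) -> [[f1, f3, f5], [f2, f4]]:
# files are striped round-robin so every Parser process gets an
# interleaved share of the work.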
2111 self.jobs = chunkify(list(self.willparse), self.num_processes)
2112
2113 for i in range(0, self.num_processes):
2114 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
2115 parser.start()
2116 self.process_names.append(parser.name)
2117 self.processes.append(parser)
2118
2119 self.results = itertools.chain(self.results, self.parse_generator())
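# Consumers see cached results first, then live parse results, since
# itertools.chain() exhausts each iterable in turn, e.g.
# itertools.chain("ab", "cd") yields 'a', 'b', 'c', 'd'.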
2120
2121 def shutdown(self, clean=True, force=False):
2122 if not self.toparse:
2123 return
2124 if self.haveshutdown:
2125 return
2126 self.haveshutdown = True
2127
2128 if clean:
2129 event = bb.event.ParseCompleted(self.cached, self.parsed,
2130 self.skipped, self.masked,
2131 self.virtuals, self.error,
2132 self.total)
2133
2134 bb.event.fire(event, self.cfgdata)
2135
2136 for process in self.processes:
2137 self.parser_quit.put(None)
2138
2139 # Clean up the queue before calling process.join(), otherwise there might
2140 # be deadlocks.
2141 while True:
2142 try:
2143 self.result_queue.get(timeout=0.25)
2144 except queue.Empty:
2145 break
2146
2147 for process in self.processes:
2148 if force:
2149 process.join(0.1)
2150 process.terminate()
2151 else:
2152 process.join()
2153
2154 self.parser_quit.close()
2155 # Allow data left in the cancel queue to be discarded
2156 self.parser_quit.cancel_join_thread()
2157
2158 def sync_caches():
2159 for c in self.bb_caches.values():
2160 c.sync()
2161
2162 sync = threading.Thread(target=sync_caches, name="SyncThread")
2163 self.syncthread = sync
2164 sync.start()
2165 bb.codeparser.parser_cache_savemerge()
2166 bb.fetch.fetcher_parse_done()
2167 if self.cooker.configuration.profile:
2168 profiles = []
2169 for i in self.process_names:
2170 logfile = "profile-parse-%s.log" % i
2171 if os.path.exists(logfile):
2172 profiles.append(logfile)
2173
2174 pout = "profile-parse.log.processed"
2175 bb.utils.process_profilelog(profiles, pout=pout)
2176 print("Processed parsing statistics saved to %s" % (pout))
2177
2178 def final_cleanup(self):
2179 if self.syncthread:
2180 self.syncthread.join()
2181
2182 def load_cached(self):
2183 for mc, cache, filename, appends in self.fromcache:
2184 cached, infos = cache.load(filename, appends)
2185 yield not cached, mc, infos
2186
2187 def parse_generator(self):
2188 while True:
2189 if self.parsed >= self.toparse:
2190 break
2191
2192 try:
2193 result = self.result_queue.get(timeout=0.25)
2194 except queue.Empty:
2195 pass
2196 else:
2197 value = result[1]
2198 if isinstance(value, BaseException):
2199 raise value
2200 else:
2201 yield result
2202
2203 def parse_next(self):
2204 result = []
2205 parsed = None
2206 try:
2207 parsed, mc, result = next(self.results)
2208 except StopIteration:
2209 self.shutdown()
2210 return False
2211 except bb.BBHandledException as exc:
2212 self.error += 1
2213 logger.error('Failed to parse recipe: %s' % exc.recipe)
2214 self.shutdown(clean=False, force=True)
2215 return False
2216 except ParsingFailure as exc:
2217 self.error += 1
2218 logger.error('Unable to parse %s: %s' %
2219 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2220 self.shutdown(clean=False, force=True)
2221 return False
2222 except bb.parse.ParseError as exc:
2223 self.error += 1
2224 logger.error(str(exc))
2225 self.shutdown(clean=False, force=True)
2226 return False
2227 except bb.data_smart.ExpansionError as exc:
2228 self.error += 1
2229 bbdir = os.path.dirname(__file__) + os.sep
2230 etype, value, _ = sys.exc_info()
2231 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2232 logger.error('ExpansionError during parsing %s', value.recipe,
2233 exc_info=(etype, value, tb))
2234 self.shutdown(clean=False, force=True)
2235 return False
2236 except Exception as exc:
2237 self.error += 1
2238 etype, value, tb = sys.exc_info()
2239 if hasattr(value, "recipe"):
2240 logger.error('Unable to parse %s' % value.recipe,
2241 exc_info=(etype, value, exc.traceback))
2242 else:
2243 # Most likely, an exception occurred during raising an exception
2244 import traceback
2245 logger.error('Exception during parse: %s' % traceback.format_exc())
2246 self.shutdown(clean=False, force=True)
2247 return False
2248
2249 self.current += 1
2250 self.virtuals += len(result)
2251 if parsed:
2252 self.parsed += 1
2253 if self.parsed % self.progress_chunk == 0:
2254 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2255 self.cfgdata)
2256 else:
2257 self.cached += 1
2258
2259 for virtualfn, info_array in result:
2260 if info_array[0].skipped:
2261 self.skipped += 1
2262 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2263 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2264 parsed=parsed, watcher=self.cooker.add_filewatch)
2265 return True
2266
2267 def reparse(self, filename):
2268 to_reparse = set()
2269 for mc in self.cooker.multiconfigs:
2270 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
2271
2272 for mc, filename, appends in to_reparse:
2273 infos = self.bb_caches[mc].parse(filename, appends)
2274 for vfn, info_array in infos:
2275 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)