Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--  bitbake/lib/bb/cooker.py  2361
1 file changed, 0 insertions, 2361 deletions
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
deleted file mode 100644
index c5bfef55d6..0000000000
--- a/bitbake/lib/bb/cooker.py
+++ /dev/null
@@ -1,2361 +0,0 @@
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import sys, os, glob, os.path, re, time
import itertools
import logging
import multiprocessing
import threading
from io import StringIO, UnsupportedOperation
from contextlib import closing
from collections import defaultdict, namedtuple
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
import queue
import signal
import prserv.serv
import json
import pickle
import codecs
import hashserv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file matches, or multiple files match, the given expression
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        for name in dir(cls):
            value = getattr(cls, name)
            if type(value) == type(cls.initial) and value == code:
                return name
        raise ValueError("Invalid status code: %s" % code)


class SkippedPackage:
    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.packages + info.rprovides
            for package in info.packages:
                self.rprovides += info.rprovides_pkg[package]
        elif reason:
            self.skipreason = reason

class CookerFeatures(object):
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def __next__(self):
        return next(self._features)


class EventWriter:
    def __init__(self, cooker, eventfile):
        self.cooker = cooker
        self.eventfile = eventfile
        self.event_queue = []

    def write_variables(self):
        with open(self.eventfile, "a") as f:
            f.write("%s\n" % json.dumps({"allvariables": self.cooker.getAllKeysWithFlags(["doc", "func"])}))

    def send(self, event):
        with open(self.eventfile, "a") as f:
            try:
                str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
                f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                                             "vars": str_event}))
            except Exception as err:
                import traceback
                print(err, traceback.format_exc())

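# Editor's note, a minimal sketch (not part of this file) of how the JSON-lines
# event log written by EventWriter could be read back; "eventfile" is a
# hypothetical path, and the decoding simply mirrors the pickle+base64 encoding
# used in send() above:
#
#   import json, pickle, codecs
#   with open(eventfile) as f:
#       for line in f:
#           entry = json.loads(line)
#           if "vars" in entry:
#               event = pickle.loads(codecs.decode(entry["vars"].encode("utf-8"), "base64"))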
#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, featureSet=None, server=None):
        self.recipecaches = None
        self.baseconfig_valid = False
        self.parsecache_valid = False
        self.eventlog = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)

        self.orig_syspath = sys.path.copy()
        self.orig_sysmodules = [*sys.modules]

        self.configuration = bb.cookerdata.CookerConfiguration()

        self.process_server = server
        self.idleCallBackRegister = None
        self.waitIdle = None
        if server:
            self.idleCallBackRegister = server.register_idle_function
            self.waitIdle = server.wait_for_idle

        bb.debug(1, "BBCooker starting %s" % time.time())

        self.configwatched = {}
        self.parsewatched = {}

        # If being called by something like tinfoil, we need to clean cached data
        # which may now be invalid
        bb.parse.clear_cache()
        bb.parse.BBHandler.cached_statements = {}

        self.ui_cmdline = None
        self.hashserv = None
        self.hashservaddr = None

        # TOSTOP must not be set or our children will hang when they output
        try:
            fd = sys.stdout.fileno()
            if os.isatty(fd):
                import termios
                tcattr = termios.tcgetattr(fd)
                if tcattr[3] & termios.TOSTOP:
                    buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                    tcattr[3] = tcattr[3] & ~termios.TOSTOP
                    termios.tcsetattr(fd, termios.TCSANOW, tcattr)
        except UnsupportedOperation:
            pass

        self.command = bb.command.Command(self, self.process_server)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

        bb.debug(1, "BBCooker startup complete %s" % time.time())

    def init_configdata(self):
        if not hasattr(self, "data"):
            self.initConfigurationData()
            bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
            self.handlePRServ()

    def _baseconfig_set(self, value):
        if value and not self.baseconfig_valid:
            bb.server.process.serverlog("Base config valid")
        elif not value and self.baseconfig_valid:
            bb.server.process.serverlog("Base config invalidated")
        self.baseconfig_valid = value

    def _parsecache_set(self, value):
        if value and not self.parsecache_valid:
            bb.server.process.serverlog("Parse cache valid")
        elif not value and self.parsecache_valid:
            bb.server.process.serverlog("Parse cache invalidated")
        self.parsecache_valid = value

    def add_filewatch(self, deps, configwatcher=False):
        if configwatcher:
            watcher = self.configwatched
        else:
            watcher = self.parsewatched

        for i in deps:
            f = i[0]
            mtime = i[1]
            watcher[f] = mtime

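    # Editor's note (an inference from the call sites in this file): "deps" is
    # an iterable of (filename, mtime) pairs such as the __base_depends entries
    # recorded at parse time. revalidateCaches() below compares these stored
    # mtimes against the files on disk to decide whether the base configuration
    # and parse caches are still valid.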
    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker received SIGHUP, shutting down...")
        self.state = state.forceshutdown
        bb.event._should_exit.set()

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if self.state not in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        sys.path = self.orig_syspath.copy()
        for mod in [*sys.modules]:
            if mod not in self.orig_sysmodules:
                del sys.modules[mod]

        self.configwatched = {}

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG")

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        caches_name_array = ['bb.cache:CoreRecipeInfo']
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            caches_name_array.append("bb.cache_extra:HobRecipeInfo")
        if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
            caches_name_array.append("bb.cache:SiggenRecipeInfo")

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                raise bb.BBHandledException()

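        # Editor's note, an illustrative expansion of the dynamic import above
        # for the default entry 'bb.cache:CoreRecipeInfo':
        #
        #   module = __import__("bb.cache", fromlist=("CoreRecipeInfo",))
        #   self.caches_array.append(getattr(module, "CoreRecipeInfo"))
        #
        # i.e. caches_array collects RecipeInfo *classes*; they are
        # instantiated later by the cache machinery, not here.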
        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash
        self.extraconfigdata = {}

        eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
        if not self.configuration.writeeventlog and eventlog:
            self.setupEventLog(eventlog)

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        self.data.setVar('BB_CMDLINE', self.ui_cmdline)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

        for mc in self.databuilder.mcdata.values():
            self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)

        self._baseconfig_set(True)
        self._parsecache_set(False)

    def handlePRServ(self):
        # Setup a PR Server based on the new configuration
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError as e:
            bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")

        if self.data.getVar("BB_HASHSERVE") == "auto":
            # Create a new hash server bound to a unix domain socket
            if not self.hashserv:
                dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
                upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
                if upstream:
                    import socket
                    try:
                        sock = socket.create_connection(upstream.split(":"), 5)
                        sock.close()
                    except socket.error as e:
                        bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                                % (upstream, repr(e)))

                self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
                self.hashserv = hashserv.create_server(
                    self.hashservaddr,
                    dbfile,
                    sync=False,
                    upstream=upstream,
                )
                self.hashserv.serve_as_process(log_level=logging.WARNING)
            for mc in self.databuilder.mcdata:
                self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
                self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)

        bb.parse.init_parser(self.data)

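    # Editor's note: when BB_HASHSERVE is "auto", the address set above takes
    # the form "unix://<TOPDIR>/hashserve.sock". It is written into every
    # multiconfig datastore so task-hash lookups from workers all reach the
    # same equivalence database; with BB_HASHSERVE_UPSTREAM set, local results
    # are layered over the (assumed reachable) upstream server.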
    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def revalidateCaches(self):
        bb.parse.clear_cache()

        clean = True
        for f in self.configwatched:
            if not bb.parse.check_mtime(f, self.configwatched[f]):
                bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                self._baseconfig_set(False)
                self._parsecache_set(False)
                clean = False
                break

        if clean:
            for f in self.parsewatched:
                if not bb.parse.check_mtime(f, self.parsewatched[f]):
                    bb.server.process.serverlog("Found %s changed, invalid cache" % f)
                    self._parsecache_set(False)
                    clean = False
                    break

        if not clean:
            bb.parse.BBHandler.cached_statements = {}

    def parseConfiguration(self):
        self.updateCacheSync()

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL")
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecaches:
            del self.recipecaches
        self.multiconfigs = self.databuilder.mcdata.keys()
        self.recipecaches = {}
        for mc in self.multiconfigs:
            self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)

        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
        self.collections = {}
        for mc in self.multiconfigs:
            self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)

        self._parsecache_set(False)

    def setupEventLog(self, eventlog):
        if self.eventlog and self.eventlog[0] != eventlog:
            bb.event.unregister_UIHhandler(self.eventlog[1])
            self.eventlog = None
        if not self.eventlog or self.eventlog[0] != eventlog:
            # we log all events to a file if so directed
            # register the log file writer as UI Handler
            if not os.path.exists(os.path.dirname(eventlog)):
                bb.utils.mkdirhier(os.path.dirname(eventlog))
            writer = EventWriter(self, eventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)

    def updateConfigOpts(self, options, environment, cmdline):
        self.ui_cmdline = cmdline
        clean = True
        for o in options:
            if o in ['prefile', 'postfile']:
                # Only these options may require a reparse
                try:
                    if getattr(self.configuration, o) == options[o]:
                        # Value is the same, no need to mark dirty
                        continue
                except AttributeError:
                    pass
                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                clean = False
            if hasattr(self.configuration, o):
                setattr(self.configuration, o, options[o])

        if self.configuration.writeeventlog:
            self.setupEventLog(self.configuration.writeeventlog)

        bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
        bb.msg.loggerDefaultDomains = self.configuration.debug_domains

        if hasattr(self, "data"):
            origenv = bb.data.init()
            for k in environment:
                origenv.setVar(k, environment[k])
            self.data.setVar("BB_ORIGENV", origenv)

        for k in bb.utils.approved_variables():
            if k in environment and k not in self.configuration.env:
                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False
            if k in self.configuration.env and k not in environment:
                logger.debug("Updating environment variable %s (deleted)" % (k))
                del self.configuration.env[k]
                clean = False
            if k not in self.configuration.env and k not in environment:
                continue
            if environment[k] != self.configuration.env[k]:
                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                self.configuration.env[k] = environment[k]
                clean = False

        # Now update all the variables not in the datastore to match
        self.configuration.env = environment

        self.revalidateCaches()
        if not clean:
            logger.debug("Base environment change, triggering reparse")
            self.reset()

    def showVersions(self):

        (latest_versions, preferred_versions, required) = self.findProviders()

        logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
        logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")

        for p in sorted(self.recipecaches[''].pkg_pn):
            preferred = preferred_versions[p]
            latest = latest_versions[p]
            requiredstr = ""
            preferredstr = ""
            if required[p]:
                if preferred[0] is not None:
                    requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
                else:
                    bb.fatal("REQUIRED_VERSION of package %s not available" % p)
            else:
                preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]

            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if preferred == latest:
                preferredstr = ""

            logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
        """
        Show the outer or per-recipe environment
        """
        fn = None
        envdata = None
        mc = ''
        if not pkgs_to_build:
            pkgs_to_build = []

        orig_tracking = self.configuration.tracking
        if not orig_tracking:
            self.enableDataTracking()
            self.reset()
            # reset() resets to the UI requested value so we have to redo this
            self.enableDataTracking()

        def mc_base(p):
            if p.startswith('mc:'):
                s = p.split(':')
                if len(s) == 2:
                    return s[1]
            return None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn, mc)
            fn = bb.cache.realfn2virtual(fn, cls, mc)
        elif len(pkgs_to_build) == 1:
            mc = mc_base(pkgs_to_build[0])
            if not mc:
                ignore = self.data.getVar("ASSUME_PROVIDED") or ""
                if pkgs_to_build[0] in set(ignore.split()):
                    bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

                taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)

                mc = runlist[0][0]
                fn = runlist[0][3]

        if fn:
            try:
                layername = self.collections[mc].calc_bbfile_priority(fn)[2]
                envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise
        else:
            if mc not in self.databuilder.mcdata:
                bb.fatal('No multiconfig named "%s" found' % mc)
            envdata = self.databuilder.mcdata[mc]
            data.expandKeys(envdata)
            parse.ast.runAnonFuncs(envdata)

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        for e in sorted(envdata.keys()):
            if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))

        if not orig_tracking:
            self.disableDataTracking()
            self.reset()

    def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        targetlist = self.checkPackages(pkgs_to_build, task)
        fulltargetlist = []
        defaulttask_implicit = ''
        defaulttask_explicit = False
        wildcard = False

        # Wild card expansion:
        # Replace strings such as "mc:*:bash"
        # with "mc:A:bash mc:B:bash bash"
        for k in targetlist:
            if k.startswith("mc:") and k.count(':') >= 2:
                if wildcard:
                    bb.fatal('multiconfig conflict')
                if k.split(":")[1] == "*":
                    wildcard = True
                    for mc in self.multiconfigs:
                        if mc:
                            fulltargetlist.append(k.replace('*', mc))
                        # implicit default task
                        else:
                            defaulttask_implicit = k.split(":")[2]
                else:
                    fulltargetlist.append(k)
            else:
                defaulttask_explicit = True
                fulltargetlist.append(k)

        if not defaulttask_explicit and defaulttask_implicit != '':
            fulltargetlist.append(defaulttask_implicit)

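        # Editor's note, an illustrative trace of the expansion above: with
        # multiconfigs "A" and "B" defined, the target "mc:*:bash" becomes
        # ["mc:A:bash", "mc:B:bash"], and plain "bash" (the implicit
        # default-configuration target) is appended afterwards unless some
        # non-wildcard target already requested it explicitly.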
        bb.debug(1, "Target list: %s" % str(fulltargetlist))
        taskdata = {}
        localdata = {}

        for mc in self.multiconfigs:
            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata[mc])

        current = 0
        runlist = []
        for k in fulltargetlist:
            origk = k
            mc = ""
            if k.startswith("mc:") and k.count(':') >= 2:
                mc = k.split(":")[1]
                k = ":".join(k.split(":")[2:])
            ktask = task
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]

            if mc not in self.multiconfigs:
                bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (origk, mc))

            taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
                # e.g. in ASSUME_PROVIDED
                continue
            fn = taskdata[mc].build_targets[k][0]
            runlist.append([mc, k, ktask, fn])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)

        havemc = False
        for mc in self.multiconfigs:
            if taskdata[mc].get_mcdepends():
                havemc = True

        # No need to check providers if there are no mcdeps or this is not a multiconfig build
        if havemc or len(self.multiconfigs) > 1:
            seen = set()
            new = True
            # Make sure we can provide the multiconfig dependency
            while new:
                mcdeps = set()
                # Add unresolved dependencies first, so indirect multiconfig dependencies are picked up in time
                for mc in self.multiconfigs:
                    taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
                    mcdeps |= set(taskdata[mc].get_mcdepends())
                new = False
                for k in mcdeps:
                    if k in seen:
                        continue
                    l = k.split(':')
                    depmc = l[2]
                    if depmc not in self.multiconfigs:
                        bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k, depmc))
                    else:
                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                        taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                        seen.add(k)
                        new = True

        for mc in self.multiconfigs:
            taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])

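        # Editor's note: the while-loop above iterates to a fixed point. From
        # the indexing in this code, an mcdepend takes the form
        # "mc:<from-mc>:<dep-mc>:<provider>[:<task>]" (l[2] is the target
        # multiconfig, l[3] the provider name). Each pass may resolve providers
        # whose recipes declare further mcdepends, so the loop repeats until a
        # pass adds nothing unseen.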
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """

        # We set halt to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)

        return runlist, taskdata

    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency graph of pkgs_to_build including reverse dependency
        information.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        rq.rqdata.prepare()
        return self.buildDependTree(rq, taskdata)

    @staticmethod
    def add_mc_prefix(mc, pn):
        if mc:
            return "mc:%s:%s" % (mc, pn)
        return pn

    def buildDependTree(self, rq, taskdata):
        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree['providermap'] = {}
        depend_tree["layer-priorities"] = self.bbfile_config_priorities

        for mc in taskdata:
            for name, fn in list(taskdata[mc].get_providermap().items()):
                pn = self.recipecaches[mc].pkg_fn[fn]
                pn = self.add_mc_prefix(mc, pn)
                if name != pn:
                    version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
                    depend_tree['providermap'][name] = (pn, version)

        for tid in rq.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)
            version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]


            dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
            if dotname not in depend_tree["tdepends"]:
                depend_tree["tdepends"][dotname] = []
            for dep in rq.rqdata.runtaskentries[tid].depends:
                (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
                deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
                if depmc:
                    depmc = "mc:" + depmc + ":"
                depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
            if taskfn not in seen_fns:
                seen_fns.append(taskfn)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    depend_tree["depends"][pn].append(dep)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    depend_tree["rdepends-pn"][pn].append(rdep)

                rdepends = self.recipecaches[mc].rundeps[taskfn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecaches[mc].runrecs[taskfn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if package not in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = taskfn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

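    # Editor's note: in the tree built above, "tdepends" keys follow the
    # "<pn>.<taskname>" convention (dependencies across multiconfigs gain an
    # "mc:<mc>:" prefix), which is also the node naming scheme used when the
    # tree is rendered to task-depends.dot in generateDotGraphFiles() below.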
    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        if not task.startswith("do_"):
            task = "do_%s" % task

        _, taskdata = self.prepareTreeData(pkgs_to_build, task)

        seen_fns = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        tids = []
        for mc in taskdata:
            for tid in taskdata[mc].taskentries:
                tids.append(tid)

        for tid in tids:
            (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)

            pn = self.recipecaches[mc].pkg_fn[taskfn]
            pn = self.add_mc_prefix(mc, pn)

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = taskfn
                version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecaches[mc].rundeps[taskfn]
                rrecs = self.recipecaches[mc].runrecs[taskfn]
                depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]

            if taskfn not in seen_fns:
                seen_fns.append(taskfn)

                depend_tree["depends"][pn] = []
                for dep in taskdata[mc].depids[taskfn]:
                    pn_provider = ""
                    if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
                        fn_provider = taskdata[mc].build_targets[dep][0]
                        pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
                    else:
                        pn_provider = dep
                    pn_provider = self.add_mc_prefix(mc, pn_provider)
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata[mc].rdepids[taskfn]:
                    pn_rprovider = ""
                    if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
                        fn_rprovider = taskdata[mc].run_targets[rdep][0]
                        pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = rdep
                    pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        with open('pn-buildlist', 'w') as f:
            for pn in depgraph["pn"]:
                f.write(pn + "\n")
        logger.info("PN build list saved to 'pn-buildlist'")

        # Remove old format output files to ensure no confusion with stale data
        try:
            os.unlink('pn-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('package-depends.dot')
        except FileNotFoundError:
            pass
        try:
            os.unlink('recipe-depends.dot')
        except FileNotFoundError:
            pass

        with open('task-depends.dot', 'w') as f:
            f.write("digraph depends {\n")
            for task in sorted(depgraph["tdepends"]):
                (pn, taskname) = task.rsplit(".", 1)
                fn = depgraph["pn"][pn]["filename"]
                version = depgraph["pn"][pn]["version"]
                f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
                for dep in sorted(depgraph["tdepends"][task]):
                    f.write('"%s" -> "%s"\n' % (task, dep))
            f.write("}\n")
        logger.info("Task dependencies saved to 'task-depends.dot'")

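    # Editor's note, an illustrative fragment of the .dot output emitted above
    # (recipe name, version and path are hypothetical):
    #
    #   digraph depends {
    #   "bash.do_compile" [label="bash do_compile\n1:5.0-r0\n/path/to/bash.bb"]
    #   "bash.do_compile" -> "bash.do_configure"
    #   }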
    def show_appends_with_no_recipes(self):
        appends_without_recipes = {}
        # Determine which bbappends haven't been applied
        for mc in self.multiconfigs:
            # First get list of recipes, including skipped
            recipefns = list(self.recipecaches[mc].pkg_fn.keys())
            recipefns.extend(self.skiplist.keys())

            # Work out list of bbappends that have been applied
            applied_appends = []
            for fn in recipefns:
                applied_appends.extend(self.collections[mc].get_file_appends(fn))

            appends_without_recipes[mc] = []
            for _, appendfn in self.collections[mc].bbappends:
                if appendfn not in applied_appends:
                    appends_without_recipes[mc].append(appendfn)

        msgs = []
        for mc in sorted(appends_without_recipes.keys()):
            if appends_without_recipes[mc]:
                msgs.append('No recipes in %s available for:\n %s' % (mc if mc else 'default',
                                                                      '\n '.join(appends_without_recipes[mc])))

        if msgs:
            msg = "\n".join(msgs)
            warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        for mc in self.multiconfigs:
            localdata = data.createCopy(self.databuilder.mcdata[mc])
            bb.data.expandKeys(localdata)

            # Handle PREFERRED_PROVIDERS
            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                try:
                    (providee, provider) = p.split(':')
                except:
                    providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                    continue
                if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
                    providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
                self.recipecaches[mc].preferred[providee] = provider

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files containing the substring 'filepattern' which are children of
        'directory' in each BBPATH. e.g. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, '.conf', 'conf/machine')
        """

        matches = []
        bbpaths = self.data.getVar('BBPATH').split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if filepattern in f:
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def testCookerCommandEvent(self, filepattern):
        # Dummy command used by OEQA selftest to test tinfoil without IO
        matches = ["A", "B"]
        bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findProviders(self, mc=''):
        return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)

    def findBestProvider(self, pn, mc=''):
        if pn in self.recipecaches[mc].providers:
            filenames = self.recipecaches[mc].providers[pn]
            eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
            if eligible is not None:
                filename = eligible[0]
            else:
                filename = None
            return None, None, None, filename
        elif pn in self.recipecaches[mc].pkg_pn:
            (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
            if required and preferred_file is None:
                return None, None, None, None
            return (latest, latest_f, preferred_ver, preferred_file)
        else:
            return None, None, None, None

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        e.g. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH').split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecaches[''].pkg_fn:
            inherits = self.recipecaches[''].inherits.get(pfn, None)
            if inherits and klass in inherits:
                pkg_list.append(self.recipecaches[''].pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            raise bb.BBHandledException()
        else:
            shell.start(self)


    def handleCollections(self, collections):
        """Handle collections"""
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                bb.debug(1, 'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                        if min_prio == 0 or prio < min_prio:
                            min_prio = prio
                        collection_priorities[c] = prio
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        collection_priorities[c] = None
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex is None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    parselog.debug("BBFILE_PATTERN_%s is empty" % c)
                    cre = re.compile('^NULL$')
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
            if errors:
                # We've already printed the actual error(s)
                raise CollectionError("Errors during parsing layer configuration")

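    # Editor's note, an illustrative priority calculation: with layers
    # core <- meta-a <- meta-b, no explicit BBFILE_PRIORITY values and
    # min_prio therefore 0, calc_layer_priority() assigns core=1, meta-a=2,
    # meta-b=3 (each layer gets max(priority of its dependencies) + 1), so
    # layers higher in the dependency stack take precedence for .bb files
    # matching their BBFILE_PATTERN.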
    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        t = time.gmtime()
        for mc in self.databuilder.mcdata:
            ds = self.databuilder.mcdata[mc]
            if not ds.getVar("BUILDNAME", False):
                ds.setVar("BUILDNAME", "${DATE}${TIME}")
            ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
            ds.setVar("DATE", time.strftime('%Y%m%d', t))
            ds.setVar("TIME", time.strftime('%H%M%S', t))

    def reset_mtime_caches(self):
        """
        Reset mtime caches - this is particularly important when memory resident as
        something which is cached may well have changed since the last invocation
        (e.g. a file associated with a recipe might have been modified by the user).
        """
        build.reset_cache()
        bb.fetch._checksum_cache.mtime_cache.clear()
        siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
        if siggen_cache:
            bb.parse.siggen.checksum_cache.mtime_cache.clear()

    def matchFiles(self, bf, mc=''):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)

        collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
        filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile, mc=''):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
        """
        matches = self.matchFiles(buildfile, mc)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)

    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn, mc)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)

        layername = self.collections[mc].calc_bbfile_priority(fn)[2]
        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
        infos = dict(infos)

        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        bb.parse.siggen.setup_datacache(self.recipecaches)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        if quietlog:
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
            if self.eventlog:
                self.eventlog[2].write_variables()
            bb.event.enable_heartbeat()

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        def buildFileIdle(server, rq, halt):

            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return bb.server.process.idleFinish(str(exc))

            if not retval:
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                    bb.event.disable_heartbeat()
                # We trashed self.recipecaches above
                self._parsecache_set(False)
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return bb.server.process.idleFinish(msg)
            if retval is True:
                return True
            return retval

        self.idleCallBackRegister(buildFileIdle, rq)

    def getTaskSignatures(self, target, tasks):
        sig = []
        getAllTaskSignatures = False

        if not tasks:
            tasks = ["do_build"]
            getAllTaskSignatures = True

        for task in tasks:
            taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
            rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
            rq.rqdata.prepare()

            for l in runlist:
                mc, pn, taskname, fn = l

                taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
                for t in taskdep['tasks']:
                    if t in taskdep['nostamp'] or "setscene" in t:
                        continue
                    tid = bb.runqueue.build_tid(mc, fn, t)

                    if t in task or getAllTaskSignatures:
                        try:
                            rq.rqdata.prepare_task_hash(tid)
                            sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                        except KeyError:
                            sig.append(self.getTaskSignatures(target, [t])[0])

        return sig

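    # Editor's note, an illustrative shape of the list returned above, one
    # entry per matching task (names and hashes are hypothetical):
    #
    #   [["bash", "do_fetch", "e5c2..."], ["bash", "do_compile", "91ab..."]]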
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, halt):
            msg = None
            interrupted = 0
            if halt or self.state == state.forceshutdown:
                bb.event._should_exit.set()
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                return bb.server.process.idleFinish(str(exc))

            if not retval:
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    bb.event.disable_heartbeat()
                return bb.server.process.idleFinish(msg)

            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if task is None:
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets always look like <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("mc:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
        if self.eventlog:
            self.eventlog[2].write_variables()
        bb.event.enable_heartbeat()

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.idleCallBackRegister(buildTargetsIdle, rq)

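    # Editor's note on the idle-callback protocol used by buildTargetsIdle and
    # buildFileIdle (as inferred from this file): the registered function is
    # polled by the server main loop; returning True means "poll me again",
    # returning a number is treated as a retry delay, and returning
    # bb.server.process.idleFinish(msg) ends the build, with msg carrying an
    # error description or None on success.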

    def getAllKeysWithFlags(self, flaglist):
        def dummy_autorev(d):
            return

        dump = {}
        # Horrible but for now we need to avoid any side effects of autorev being called
        saved = bb.fetch2.get_autorev
        bb.fetch2.get_autorev = dummy_autorev
        for k in self.data.keys():
            try:
                expand = True
                flags = self.data.getVarFlags(k)
                if flags and "func" in flags and "python" in flags:
                    expand = False
                v = self.data.getVar(k, expand)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v': str(v),
                        'history': self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        if flags and d in flags:
                            dump[k][d] = flags[d]
                        else:
                            dump[k][d] = None
            except Exception as e:
                print(e)
        bb.fetch2.get_autorev = saved
        return dump

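    # Editor's note, an illustrative shape of the dump returned above for
    # flaglist=["doc", "func"] (variable name and values are hypothetical):
    #
    #   {"DESCRIPTION": {"v": "...", "history": [...], "doc": "...", "func": None}}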
1575
1576 def updateCacheSync(self):
1577 if self.state == state.running:
1578 return
1579
1580 if not self.baseconfig_valid:
1581 logger.debug("Reloading base configuration data")
1582 self.initConfigurationData()
1583 self.handlePRServ()
1584
1585 # This is called for all async commands when self.state != running
1586 def updateCache(self):
1587 if self.state == state.running:
1588 return
1589
1590 if self.state in (state.shutdown, state.forceshutdown, state.error):
1591 if hasattr(self.parser, 'shutdown'):
1592 self.parser.shutdown(clean=False)
1593 self.parser.final_cleanup()
1594 raise bb.BBHandledException()
1595
1596 if self.state != state.parsing:
1597 self.updateCacheSync()
1598
1599 if self.state != state.parsing and not self.parsecache_valid:
1600 bb.server.process.serverlog("Parsing started")
1601 self.parsewatched = {}
1602
1603 bb.parse.siggen.reset(self.data)
1604 self.parseConfiguration ()
1605 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1606 for mc in self.multiconfigs:
1607 bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])
1608
1609 for mc in self.multiconfigs:
1610 ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
1611 self.recipecaches[mc].ignored_dependencies = set(ignore.split())
1612
1613 for dep in self.configuration.extra_assume_provided:
1614 self.recipecaches[mc].ignored_dependencies.add(dep)
1615
1616 mcfilelist = {}
1617 total_masked = 0
1618 searchdirs = set()
1619 for mc in self.multiconfigs:
1620 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1621
1622 mcfilelist[mc] = filelist
1623 total_masked += masked
1624 searchdirs |= set(search)
1625
1626 # Add mtimes for directories searched for bb/bbappend files
1627 for dirent in searchdirs:
1628 self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1629
1630 self.parser = CookerParser(self, mcfilelist, total_masked)
1631 self._parsecache_set(True)
1632
1633 self.state = state.parsing
1634
1635 if not self.parser.parse_next():
1636 collectlog.debug("parsing complete")
1637 if self.parser.error:
1638 raise bb.BBHandledException()
1639 self.show_appends_with_no_recipes()
1640 self.handlePrefProviders()
1641 for mc in self.multiconfigs:
1642 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1643 self.state = state.running
1644
1645 # Send an event listing all stamps reachable after parsing
1646 # which the metadata may use to clean up stale data
1647 for mc in self.multiconfigs:
1648 event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
1649 bb.event.fire(event, self.databuilder.mcdata[mc])
1650 return None
1651
1652 return True
1653
1654 def checkPackages(self, pkgs_to_build, task=None):
1655
1656 # Return a copy, don't modify the original
1657 pkgs_to_build = pkgs_to_build[:]
1658
1659 if not pkgs_to_build:
1660 raise NothingToBuild
1661
1662 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
1663 for pkg in pkgs_to_build.copy():
1664 if pkg in ignore:
1665 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1666 if pkg.startswith("multiconfig:"):
1667 pkgs_to_build.remove(pkg)
1668 pkgs_to_build.append(pkg.replace("multiconfig:", "mc:"))
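            # e.g. a legacy "multiconfig:musl:zlib" target is rewritten to
            # "mc:musl:zlib" (hypothetical target, for illustration only)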
1669
1670 if 'world' in pkgs_to_build:
1671 pkgs_to_build.remove('world')
1672 for mc in self.multiconfigs:
1673 bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
1674 for t in self.recipecaches[mc].world_target:
1675 if mc:
1676 t = "mc:" + mc + ":" + t
1677 pkgs_to_build.append(t)
1678
1679 if 'universe' in pkgs_to_build:
1680 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1681 parselog.debug("collating packages for \"universe\"")
1682 pkgs_to_build.remove('universe')
1683 for mc in self.multiconfigs:
1684 for t in self.recipecaches[mc].universe_target:
1685 if task:
1686 foundtask = False
1687 for provider_fn in self.recipecaches[mc].providers[t]:
1688 if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
1689 foundtask = True
1690 break
1691 if not foundtask:
1692 bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
1693 continue
1694 if mc:
1695 t = "mc:" + mc + ":" + t
1696 pkgs_to_build.append(t)
1697
1698 return pkgs_to_build
1699
1700 def pre_serve(self):
1701 return
1702
1703 def post_serve(self):
1704 self.shutdown(force=True)
1705 prserv.serv.auto_shutdown()
1706 if hasattr(bb.parse, "siggen"):
1707 bb.parse.siggen.exit()
1708 if self.hashserv:
1709 self.hashserv.process.terminate()
1710 self.hashserv.process.join()
1711 if hasattr(self, "data"):
1712 bb.event.fire(CookerExit(), self.data)
1713
1714 def shutdown(self, force=False):
1715 if force:
1716 self.state = state.forceshutdown
1717 bb.event._should_exit.set()
1718 else:
1719 self.state = state.shutdown
1720
1721 if self.parser:
1722 self.parser.shutdown(clean=False)
1723 self.parser.final_cleanup()
1724
1725 def finishcommand(self):
1726 if hasattr(self.parser, 'shutdown'):
1727 self.parser.shutdown(clean=False)
1728 self.parser.final_cleanup()
1729 self.state = state.initial
1730 bb.event._should_exit.clear()
1731
1732 def reset(self):
1733 if hasattr(bb.parse, "siggen"):
1734 bb.parse.siggen.exit()
1735 self.finishcommand()
1736 self.initConfigurationData()
1737 self.handlePRServ()
1738
1739 def clientComplete(self):
1740 """Called when the client is done using the server"""
1741 self.finishcommand()
1742 self.extraconfigdata = {}
1743 self.command.reset()
1744 if hasattr(self, "data"):
1745 self.databuilder.reset()
1746 self.data = self.databuilder.data
1747 # In theory tinfoil could have modified the base data before parsing,
1748 # ideally need to track if anything did modify the datastore
1749 self._parsecache_set(False)
1750
1751class CookerExit(bb.event.Event):
1752 """
1753 Notify clients of the Cooker shutdown
1754 """
1755
1756 def __init__(self):
1757 bb.event.Event.__init__(self)
1758
1759
1760class CookerCollectFiles(object):
1761 def __init__(self, priorities, mc=''):
1762 self.mc = mc
1763 self.bbappends = []
1764        # Priorities is a list of tuples, with the second element as the pattern.
1765        # We need to sort the list with the longest pattern first, down to the
1766        # shortest. This allows nested layers to be properly evaluated.
1767 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1768
1769 def calc_bbfile_priority(self, filename):
1770 for layername, _, regex, pri in self.bbfile_config_priorities:
1771 if regex.match(filename):
1772 return pri, regex, layername
1773 return 0, None, None
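
    # Because bbfile_config_priorities is sorted longest-pattern-first, a recipe
    # in a nested layer matches the inner layer's regex before the outer one.
    # Sketch with hypothetical layers: given patterns "^/srv/meta-outer/meta-inner/"
    # and "^/srv/meta-outer/", a file under meta-inner/ gets meta-inner's priority
    # even though both regexes would match it.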
1774
1775 def get_bbfiles(self):
1776 """Get list of default .bb files by reading out the current directory"""
1777 path = os.getcwd()
1778 contents = os.listdir(path)
1779 bbfiles = []
1780 for f in contents:
1781 if f.endswith(".bb"):
1782 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1783 return bbfiles
1784
1785 def find_bbfiles(self, path):
1786 """Find all the .bb and .bbappend files in a directory"""
1787 found = []
1788 for dir, dirs, files in os.walk(path):
1789 for ignored in ('SCCS', 'CVS', '.svn'):
1790 if ignored in dirs:
1791 dirs.remove(ignored)
1792 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1793
1794 return found
1795
1796 def collect_bbfiles(self, config, eventdata):
1797 """Collect all available .bb build files"""
1798 masked = 0
1799
1800 collectlog.debug("collecting .bb files")
1801
1802        files = (config.getVar("BBFILES") or "").split()
1803
1804 # Sort files by priority
1805        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0])
1806 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1807
1808 if not files:
1809 files = self.get_bbfiles()
1810
1811 if not files:
1812 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1813 bb.event.fire(CookerExit(), eventdata)
1814
1815        # We need to track where we look so that we know when the cache is invalid. There
1816        # is no nice way to do this; it is horrid. We intercept the os.listdir()
1817        # (or, on Python 3.6+, os.scandir()) calls while we run glob().
1818 origlistdir = os.listdir
1819 if hasattr(os, 'scandir'):
1820 origscandir = os.scandir
1821 searchdirs = []
1822
1823 def ourlistdir(d):
1824 searchdirs.append(d)
1825 return origlistdir(d)
1826
1827 def ourscandir(d):
1828 searchdirs.append(d)
1829 return origscandir(d)
1830
1831 os.listdir = ourlistdir
1832 if hasattr(os, 'scandir'):
1833 os.scandir = ourscandir
1834 try:
1835 # Can't use set here as order is important
1836 newfiles = []
1837 for f in files:
1838 if os.path.isdir(f):
1839 dirfiles = self.find_bbfiles(f)
1840 for g in dirfiles:
1841 if g not in newfiles:
1842 newfiles.append(g)
1843 else:
1844 globbed = glob.glob(f)
1845 if not globbed and os.path.exists(f):
1846 globbed = [f]
1847 # glob gives files in order on disk. Sort to be deterministic.
1848 for g in sorted(globbed):
1849 if g not in newfiles:
1850 newfiles.append(g)
1851 finally:
1852 os.listdir = origlistdir
1853 if hasattr(os, 'scandir'):
1854 os.scandir = origscandir
1855
1856 bbmask = config.getVar('BBMASK')
1857
1858 if bbmask:
1859 # First validate the individual regular expressions and ignore any
1860 # that do not compile
1861 bbmasks = []
1862 for mask in bbmask.split():
1863 # When constructing an older style single regex, it's possible for BBMASK
1864 # to end up beginning with '|', which matches and masks _everything_.
1865 if mask.startswith("|"):
1866 collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
1867 mask = mask[1:]
1868 try:
1869 re.compile(mask)
1870 bbmasks.append(mask)
1871 except re.error:
1872 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1873
1874 # Then validate the combined regular expressions. This should never
1875 # fail, but better safe than sorry...
1876 bbmask = "|".join(bbmasks)
1877 try:
1878 bbmask_compiled = re.compile(bbmask)
1879 except re.error:
1880 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1881 bbmask = None
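
            # Sketch of the combination step (hypothetical layer paths): with
            # BBMASK = "meta-foo/skip-this/ meta-bar/broken.bb", the masks are
            # joined into the single regex "meta-foo/skip-this/|meta-bar/broken.bb"
            # and every matching path below is counted as masked.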
1882
1883 bbfiles = []
1884 bbappend = []
1885 for f in newfiles:
1886 if bbmask and bbmask_compiled.search(f):
1887 collectlog.debug("skipping masked file %s", f)
1888 masked += 1
1889 continue
1890 if f.endswith('.bb'):
1891 bbfiles.append(f)
1892 elif f.endswith('.bbappend'):
1893 bbappend.append(f)
1894 else:
1895 collectlog.debug("skipping %s: unknown file extension", f)
1896
1897 # Build a list of .bbappend files for each .bb file
1898 for f in bbappend:
1899 base = os.path.basename(f).replace('.bbappend', '.bb')
1900 self.bbappends.append((base, f))
1901
1902 # Find overlayed recipes
1903 # bbfiles will be in priority order which makes this easy
1904 bbfile_seen = dict()
1905 self.overlayed = defaultdict(list)
1906 for f in reversed(bbfiles):
1907 base = os.path.basename(f)
1908 if base not in bbfile_seen:
1909 bbfile_seen[base] = f
1910 else:
1911 topfile = bbfile_seen[base]
1912 self.overlayed[topfile].append(f)
1913
1914 return (bbfiles, masked, searchdirs)
1915
1916 def get_file_appends(self, fn):
1917 """
1918 Returns a list of .bbappend files to apply to fn
1919 """
1920 filelist = []
1921 f = os.path.basename(fn)
1922 for b in self.bbappends:
1923 (bbappend, filename) = b
1924 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1925 filelist.append(filename)
1926 return tuple(filelist)
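
    # Wildcard sketch (hypothetical recipe names): "busybox_%.bbappend" is stored
    # as ("busybox_%.bb", <path>), so it applies to "busybox_1.36.2.bb" because
    # the name up to the '%' matches, whereas "busybox_1.36.2.bbappend" applies
    # to that exact recipe only.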
1927
1928 def collection_priorities(self, pkgfns, fns, d):
1929 # Return the priorities of the entries in pkgfns
1930 # Also check that all the regexes in self.bbfile_config_priorities are used
1931 # (but to do that we need to ensure skipped recipes aren't counted, nor
1932 # collections in BBFILE_PATTERN_IGNORE_EMPTY)
1933
1934 priorities = {}
1935 seen = set()
1936 matched = set()
1937
1938 matched_regex = set()
1939 unmatched_regex = set()
1940 for _, _, regex, _ in self.bbfile_config_priorities:
1941 unmatched_regex.add(regex)
1942
1943 # Calculate priorities for each file
1944 for p in pkgfns:
1945 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1946 priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1947 if regex in unmatched_regex:
1948 matched_regex.add(regex)
1949 unmatched_regex.remove(regex)
1950 seen.add(realfn)
1951 if regex:
1952 matched.add(realfn)
1953
1954 if unmatched_regex:
1955 # Account for bbappend files
1956 for b in self.bbappends:
1957 (bbfile, append) = b
1958 seen.add(append)
1959
1960 # Account for skipped recipes
1961 seen.update(fns)
1962
1963 seen.difference_update(matched)
1964
1965 def already_matched(fn):
1966 for regex in matched_regex:
1967 if regex.match(fn):
1968 return True
1969 return False
1970
1971 for unmatch in unmatched_regex.copy():
1972 for fn in seen:
1973 if unmatch.match(fn):
1974                        # If the bbappend or file was already matched by another regex, skip it,
1975                        # e.g. for a layer within a layer the outer regex could match while the
1976                        # inner regex matches nothing, and we should warn about that
1977 if already_matched(fn):
1978 continue
1979 unmatched_regex.remove(unmatch)
1980 break
1981
1982 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1983 if regex in unmatched_regex:
1984 if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
1985 collectlog.warning("No bb files in %s matched BBFILE_PATTERN_%s '%s'" % (self.mc if self.mc else 'default',
1986 collection, pattern))
1987
1988 return priorities
1989
1990class ParsingFailure(Exception):
1991 def __init__(self, realexception, recipe):
1992 self.realexception = realexception
1993 self.recipe = recipe
1994 Exception.__init__(self, realexception, recipe)
1995
1996class Parser(multiprocessing.Process):
1997 def __init__(self, jobs, results, quit, profile):
1998 self.jobs = jobs
1999 self.results = results
2000 self.quit = quit
2001 multiprocessing.Process.__init__(self)
2002 self.context = bb.utils.get_context().copy()
2003 self.handlers = bb.event.get_class_handlers().copy()
2004 self.profile = profile
2005 self.queue_signals = False
2006 self.signal_received = []
2007 self.signal_threadlock = threading.Lock()
2008
2009 def catch_sig(self, signum, frame):
2010 if self.queue_signals:
2011 self.signal_received.append(signum)
2012 else:
2013 self.handle_sig(signum, frame)
2014
2015 def handle_sig(self, signum, frame):
2016 if signum == signal.SIGTERM:
2017 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2018 os.kill(os.getpid(), signal.SIGTERM)
2019 elif signum == signal.SIGINT:
2020 signal.default_int_handler(signum, frame)
2021
2022 def run(self):
2023
2024 if not self.profile:
2025 self.realrun()
2026 return
2027
2028 try:
2029 import cProfile as profile
2030 except:
2031 import profile
2032 prof = profile.Profile()
2033 try:
2034 profile.Profile.runcall(prof, self.realrun)
2035 finally:
2036 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
2037 prof.dump_stats(logfile)
2038
2039 def realrun(self):
2040 # Signal handling here is hard. We must not terminate any process or thread holding the write
2041 # lock for the event stream as it will not be released, ever, and things will hang.
2042        # Python handles signals in the main thread/process, but they can be raised from
2043        # any thread, and we want to defer processing of SIGTERM/SIGINT until we're outside
2044        # the critical section and no longer hold the lock (see server/process.py). We
2045        # therefore always catch the signals (so any new thread should also do so) and defer
2046        # handling, but we do that handling with the local thread lock (a threading lock, not
2047        # a multiprocessing one) held, so no other thread in the process can be in the critical section.
2048 signal.signal(signal.SIGTERM, self.catch_sig)
2049 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2050 signal.signal(signal.SIGINT, self.catch_sig)
2051 bb.utils.set_process_name(multiprocessing.current_process().name)
2052 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2053 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2054
2055 pending = []
2056 havejobs = True
2057 try:
2058 while havejobs or pending:
2059 if self.quit.is_set():
2060 break
2061
2062 job = None
2063 try:
2064 job = self.jobs.pop()
2065 except IndexError:
2066 havejobs = False
2067 if job:
2068 result = self.parse(*job)
2069                    # Clear the siggen cache after parsing to control memory usage; it's huge
2070 bb.parse.siggen.postparsing_clean_cache()
2071 pending.append(result)
2072
2073 if pending:
2074 try:
2075 result = pending.pop()
2076 self.results.put(result, timeout=0.05)
2077 except queue.Full:
2078 pending.append(result)
2079 finally:
2080 self.results.close()
2081 self.results.join_thread()
2082
2083 def parse(self, mc, cache, filename, appends, layername):
2084 try:
2085 origfilter = bb.event.LogHandler.filter
2086 # Record the filename we're parsing into any events generated
2087 def parse_filter(self, record):
2088 record.taskpid = bb.event.worker_pid
2089 record.fn = filename
2090 return True
2091
2092 # Reset our environment and handlers to the original settings
2093 bb.utils.set_context(self.context.copy())
2094 bb.event.set_class_handlers(self.handlers.copy())
2095 bb.event.LogHandler.filter = parse_filter
2096
2097 return True, mc, cache.parse(filename, appends, layername)
2098 except Exception as exc:
2099 tb = sys.exc_info()[2]
2100 exc.recipe = filename
2101 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2102 return True, None, exc
2103        # Need to turn BaseExceptions into Exceptions here so that we shut down gracefully
2104        # and a worker thread doesn't just exit on its own in response to, for example,
2105        # a SystemExit event.
2106 except BaseException as exc:
2107 return True, None, ParsingFailure(exc, filename)
2108 finally:
2109 bb.event.LogHandler.filter = origfilter
2110
2111class CookerParser(object):
2112 def __init__(self, cooker, mcfilelist, masked):
2113 self.mcfilelist = mcfilelist
2114 self.cooker = cooker
2115 self.cfgdata = cooker.data
2116 self.cfghash = cooker.data_hash
2117 self.cfgbuilder = cooker.databuilder
2118
2119 # Accounting statistics
2120 self.parsed = 0
2121 self.cached = 0
2122 self.error = 0
2123 self.masked = masked
2124
2125 self.skipped = 0
2126 self.virtuals = 0
2127
2128 self.current = 0
2129 self.process_names = []
2130
2131 self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
2132 self.fromcache = set()
2133 self.willparse = set()
2134 for mc in self.cooker.multiconfigs:
2135 for filename in self.mcfilelist[mc]:
2136 appends = self.cooker.collections[mc].get_file_appends(filename)
2137 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2138 if not self.bb_caches[mc].cacheValid(filename, appends):
2139 self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2140 else:
2141 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2142
2143 self.total = len(self.fromcache) + len(self.willparse)
2144 self.toparse = len(self.willparse)
2145 self.progress_chunk = int(max(self.toparse / 100, 1))
2146
2147 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2148 multiprocessing.cpu_count()), self.toparse)
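
        # Sizing sketch (illustrative numbers): with BB_NUMBER_PARSE_THREADS unset
        # on an 8-core machine and only 3 files needing a parse, this evaluates to
        # min(8, 3) = 3, so idle parser processes are never spawned.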
2149
2150 bb.cache.SiggenRecipeInfo.reset()
2151 self.start()
2152 self.haveshutdown = False
2153 self.syncthread = None
2154
2155 def start(self):
2156 self.results = self.load_cached()
2157 self.processes = []
2158 if self.toparse:
2159 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2160
2161 self.parser_quit = multiprocessing.Event()
2162 self.result_queue = multiprocessing.Queue()
2163
2164        def chunkify(lst, n):
2165 return [lst[i::n] for i in range(n)]
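        # Round-robin sketch (added; not in the original source):
        #   chunkify([1, 2, 3, 4, 5], 2) -> [[1, 3, 5], [2, 4]]
        # so each parser process receives an interleaved share of the jobs.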
2166 self.jobs = chunkify(list(self.willparse), self.num_processes)
2167
2168 for i in range(0, self.num_processes):
2169 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2170 parser.start()
2171 self.process_names.append(parser.name)
2172 self.processes.append(parser)
2173
2174 self.results = itertools.chain(self.results, self.parse_generator())
2175
2176 def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2177 if not self.toparse:
2178 return
2179 if self.haveshutdown:
2180 return
2181 self.haveshutdown = True
2182
2183 if clean:
2184 event = bb.event.ParseCompleted(self.cached, self.parsed,
2185 self.skipped, self.masked,
2186 self.virtuals, self.error,
2187 self.total)
2188
2189 bb.event.fire(event, self.cfgdata)
2190 else:
2191 bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2192 bb.error("Parsing halted due to errors, see error messages above")
2193
2194        # Clean up the queue before calling process.join(), otherwise there might be
2195        # deadlocks.
2196 while True:
2197 try:
2198 self.result_queue.get(timeout=0.25)
2199 except queue.Empty:
2200 break
2201
2202 def sync_caches():
2203 for c in self.bb_caches.values():
2204 bb.cache.SiggenRecipeInfo.reset()
2205 c.sync()
2206
2207 self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2208 self.syncthread.start()
2209
2210 self.parser_quit.set()
2211
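        # Summary (added for clarity; not in the original): shutdown escalates
        # gently: signal the quit event and join with a timeout, SIGINT any
        # survivors and join again, terminate() stragglers, then do a final
        # blocking join.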
2212 for process in self.processes:
2213 process.join(0.5)
2214
2215 for process in self.processes:
2216 if process.exitcode is None:
2217 os.kill(process.pid, signal.SIGINT)
2218
2219 for process in self.processes:
2220 process.join(0.5)
2221
2222 for process in self.processes:
2223 if process.exitcode is None:
2224 process.terminate()
2225
2226 for process in self.processes:
2227 process.join()
2228            # process.close() was added in Python 3.7; it cleans up zombies
2229 if hasattr(process, "close"):
2230 process.close()
2231
2232 bb.codeparser.parser_cache_save()
2233 bb.codeparser.parser_cache_savemerge()
2234 bb.cache.SiggenRecipeInfo.reset()
2235 bb.fetch.fetcher_parse_done()
2236 if self.cooker.configuration.profile:
2237 profiles = []
2238 for i in self.process_names:
2239 logfile = "profile-parse-%s.log" % i
2240 if os.path.exists(logfile):
2241 profiles.append(logfile)
2242
2243 pout = "profile-parse.log.processed"
2244            bb.utils.process_profilelog(profiles, pout=pout)
2245 print("Processed parsing statistics saved to %s" % (pout))
2246
2247 def final_cleanup(self):
2248 if self.syncthread:
2249 self.syncthread.join()
2250
2251 def load_cached(self):
2252 for mc, cache, filename, appends, layername in self.fromcache:
2253 infos = cache.loadCached(filename, appends)
2254 yield False, mc, infos
2255
2256 def parse_generator(self):
2257 empty = False
2258 while self.processes or not empty:
2259 for process in self.processes.copy():
2260 if not process.is_alive():
2261 process.join()
2262 self.processes.remove(process)
2263
2264 if self.parsed >= self.toparse:
2265 break
2266
2267 try:
2268 result = self.result_queue.get(timeout=0.25)
2269 except queue.Empty:
2270 empty = True
2271 yield None, None, None
2272 else:
2273 empty = False
2274 yield result
2275
2276        if self.parsed < self.toparse:
2277 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2278
2279
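    # Note (added for clarity; not in the original): parse_next() drives the
    # chained results generator above; the (None, None, None) triples produced
    # on queue timeouts make it return True immediately so the server's main
    # loop keeps servicing events while parsing continues.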
2280 def parse_next(self):
2281 result = []
2282 parsed = None
2283 try:
2284 parsed, mc, result = next(self.results)
2285 if isinstance(result, BaseException):
2286            # Results that are exceptions were queued as objects; re-raise them here
2287 raise result
2288 if parsed is None:
2289 # Timeout, loop back through the main loop
2290 return True
2291
2292 except StopIteration:
2293 self.shutdown()
2294 return False
2295 except bb.BBHandledException as exc:
2296 self.error += 1
2297 logger.debug('Failed to parse recipe: %s' % exc.recipe)
2298 self.shutdown(clean=False)
2299 return False
2300 except ParsingFailure as exc:
2301 self.error += 1
2302 logger.error('Unable to parse %s: %s' %
2303 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
2304 self.shutdown(clean=False)
2305 return False
2306 except bb.parse.ParseError as exc:
2307 self.error += 1
2308 logger.error(str(exc))
2309 self.shutdown(clean=False, eventmsg=str(exc))
2310 return False
2311 except bb.data_smart.ExpansionError as exc:
2312 self.error += 1
2313 bbdir = os.path.dirname(__file__) + os.sep
2314 etype, value, _ = sys.exc_info()
2315 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
2316 logger.error('ExpansionError during parsing %s', value.recipe,
2317 exc_info=(etype, value, tb))
2318 self.shutdown(clean=False)
2319 return False
2320 except Exception as exc:
2321 self.error += 1
2322 etype, value, tb = sys.exc_info()
2323 if hasattr(value, "recipe"):
2324 logger.error('Unable to parse %s' % value.recipe,
2325 exc_info=(etype, value, exc.traceback))
2326 else:
2327 # Most likely, an exception occurred during raising an exception
2328 import traceback
2329 logger.error('Exception during parse: %s' % traceback.format_exc())
2330 self.shutdown(clean=False)
2331 return False
2332
2333 self.current += 1
2334 self.virtuals += len(result)
2335 if parsed:
2336 self.parsed += 1
2337 if self.parsed % self.progress_chunk == 0:
2338 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2339 self.cfgdata)
2340 else:
2341 self.cached += 1
2342
2343 for virtualfn, info_array in result:
2344 if info_array[0].skipped:
2345 self.skipped += 1
2346 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2347 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2348                                    parsed=parsed, watcher=self.cooker.add_filewatch)
2349 return True
2350
2351 def reparse(self, filename):
2352 bb.cache.SiggenRecipeInfo.reset()
2353 to_reparse = set()
2354 for mc in self.cooker.multiconfigs:
2355 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2356 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2357
2358 for mc, filename, appends, layername in to_reparse:
2359 infos = self.bb_caches[mc].parse(filename, appends, layername)
2360 for vfn, info_array in infos:
2361 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)