summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/cooker.py
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r--bitbake/lib/bb/cooker.py2025
1 file changed, 2025 insertions, 0 deletions
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
new file mode 100644
index 0000000000..879d2ba065
--- /dev/null
+++ b/bitbake/lib/bb/cooker.py
@@ -0,0 +1,2025 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
39import Queue
40import signal
41import prserv.serv
42import pyinotify
43
# Module-level loggers: a parent "BitBake" logger plus per-subsystem children
# so collection, build, parsing and provider-selection output can be filtered
# and formatted independently.
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
49
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found.
    Derives from BBHandledException: the error is reported where it is
    raised, so callers need not print it again.
    """
54
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build.
    """
59
class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect.
    Derives from BBHandledException: already reported when raised.
    """
64
class state:
    """Integer constants describing the cooker's lifecycle state."""
    # Numbered sequentially so values stay comparable/serialisable as ints.
    initial = 0
    parsing = 1
    running = 2
    shutdown = 3
    forceshutdown = 4
    stopped = 5
    error = 6
67
68
class SkippedPackage:
    """
    Snapshot of a recipe that was skipped during parsing: its name, provider
    information and the reason it was skipped.
    """
    def __init__(self, info = None, reason = None):
        # Default every field so a bare SkippedPackage() is still well-formed.
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the interesting fields out of the recipe info object.
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            # No recipe info available; record only the textual reason.
            self.skipreason = reason
83
84
class CookerFeatures(object):
    """
    Set of optional cooker features requested by a UI.  Feature identifiers
    are small integers; unknown identifiers are silently ignored.
    """
    # Assigns HOB_EXTRA_CACHES=0 .. SEND_SANITYEVENTS=3 and keeps the full
    # list in _feature_list for validation.
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        # BUGFIX: set objects have no .next() method, so the previous
        # "self._features.next()" raised AttributeError whenever next() was
        # called directly.  Delegate through a real iterator instead.
        return next(iter(self._features))
105
106
107#============================================================================#
108# BBCooker
109#============================================================================#
110class BBCooker:
111 """
112 Manages one bitbake build run
113 """
114
115 def __init__(self, configuration, featureSet = []):
116 self.recipecache = None
117 self.skiplist = {}
118 self.featureset = CookerFeatures()
119 for f in featureSet:
120 self.featureset.setFeature(f)
121
122 self.configuration = configuration
123
124 self.configwatcher = pyinotify.WatchManager()
125 self.configwatcher.bbseen = []
126 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
127 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
128 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
129 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
130 self.watcher = pyinotify.WatchManager()
131 self.watcher.bbseen = []
132 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
133
134
135 self.initConfigurationData()
136
137 self.inotify_modified_files = []
138
139 def _process_inotify_updates(server, notifier_list, abort):
140 for n in notifier_list:
141 if n.check_events(timeout=0):
142 # read notified events and enqeue them
143 n.read_events()
144 n.process_events()
145 return 1.0
146
147 self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
148
149 self.baseconfig_valid = True
150 self.parsecache_valid = False
151
152 # Take a lock so only one copy of bitbake can run against a given build
153 # directory at a time
154 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
155 self.lock = bb.utils.lockfile(lockfile, False, False)
156 if not self.lock:
157 bb.fatal("Only one copy of bitbake should be run against a build directory")
158 try:
159 self.lock.seek(0)
160 self.lock.truncate()
161 if len(configuration.interface) >= 2:
162 self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
163 self.lock.flush()
164 except:
165 pass
166
167 # TOSTOP must not be set or our children will hang when they output
168 fd = sys.stdout.fileno()
169 if os.isatty(fd):
170 import termios
171 tcattr = termios.tcgetattr(fd)
172 if tcattr[3] & termios.TOSTOP:
173 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
174 tcattr[3] = tcattr[3] & ~termios.TOSTOP
175 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
176
177 self.command = bb.command.Command(self)
178 self.state = state.initial
179
180 self.parser = None
181
182 signal.signal(signal.SIGTERM, self.sigterm_exception)
183 # Let SIGHUP exit as SIGTERM
184 signal.signal(signal.SIGHUP, self.sigterm_exception)
185
186 def config_notifications(self, event):
187 if not event.path in self.inotify_modified_files:
188 self.inotify_modified_files.append(event.path)
189 self.baseconfig_valid = False
190
191 def notifications(self, event):
192 if not event.path in self.inotify_modified_files:
193 self.inotify_modified_files.append(event.path)
194 self.parsecache_valid = False
195
196 def add_filewatch(self, deps, watcher=None):
197 if not watcher:
198 watcher = self.watcher
199 for i in deps:
200 f = i[0]
201 if f in watcher.bbseen:
202 continue
203 watcher.bbseen.append(f)
204 while True:
205 # We try and add watches for files that don't exist but if they did, would influence
206 # the parser. The parent directory of these files may not exist, in which case we need
207 # to watch any parent that does exist for changes.
208 try:
209 watcher.add_watch(f, self.watchmask, quiet=False)
210 break
211 except pyinotify.WatchManagerError as e:
212 if 'ENOENT' in str(e):
213 f = os.path.dirname(f)
214 watcher.bbseen.append(f)
215 continue
216 raise
217
218 def sigterm_exception(self, signum, stackframe):
219 if signum == signal.SIGTERM:
220 bb.warn("Cooker recieved SIGTERM, shutting down...")
221 elif signum == signal.SIGHUP:
222 bb.warn("Cooker recieved SIGHUP, shutting down...")
223 self.state = state.forceshutdown
224
225 def setFeatures(self, features):
226 # we only accept a new feature set if we're in state initial, so we can reset without problems
227 if self.state != state.initial:
228 raise Exception("Illegal state for feature set change")
229 original_featureset = list(self.featureset)
230 for feature in features:
231 self.featureset.setFeature(feature)
232 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
233 if (original_featureset != list(self.featureset)):
234 self.reset()
235
    def initConfigurationData(self):
        """
        (Re)build the base configuration: select the recipe-info cache
        classes to load, parse the base configuration into self.data,
        optionally install a JSON event-log writer as a UI handler, and
        prepare the copy of the datastore used for firing events.
        """
        self.state = state.initial
        self.caches_array = []

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries are "module:classname"; import the module and pull
                # the cache class out of it.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            import json, pickle
            DEFAULT_EVENTFILE = self.configuration.writeeventlog
            class EventLogWriteHandler():

                class EventWriter():
                    # Buffers events until bb.event.BuildStarted arrives,
                    # then writes everything to DEFAULT_EVENTFILE as one
                    # JSON object per line.
                    def __init__(self, cooker):
                        self.file_inited = None
                        self.cooker = cooker
                        self.event_queue = []

                    def init_file(self):
                        try:
                            # delete the old log (best-effort: it may not exist)
                            os.remove(DEFAULT_EVENTFILE)
                        except:
                            pass

                        # write current configuration data
                        with open(DEFAULT_EVENTFILE, "w") as f:
                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                    def write_event(self, event):
                        # Events are pickled then JSON-wrapped so the reader
                        # can reconstruct the original event object.
                        with open(DEFAULT_EVENTFILE, "a") as f:
                            try:
                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
                            except Exception as e:
                                import traceback
                                # NOTE(review): format_exc() takes a limit
                                # argument, not an exception — confirm intent.
                                print(e, traceback.format_exc(e))

                    def send(self, event):
                        event_class = event.__module__ + "." + event.__class__.__name__

                        # init on bb.event.BuildStarted
                        if self.file_inited is None:
                            if event_class == "bb.event.BuildStarted":
                                self.init_file()
                                self.file_inited = True

                                # write pending events
                                for e in self.event_queue:
                                    self.write_event(e)

                                # also write the current event
                                self.write_event(event)

                            else:
                                # queue all events until the file is inited
                                self.event_queue.append(event)

                        else:
                            # we have the file, just write the event
                            self.write_event(event)

                # set our handler's event processor
                event = EventWriter(self)  # self is the cooker here

            # set up cooker features for this mock UI handler

            # we need to write the dependency tree in the log
            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
            # register the log file writer as UI Handler
            bb.event.register_UIHhandler(EventLogWriteHandler())

        #
        # Special updated configuration we use for firing events
        #
        self.event_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.event_data)
        bb.parse.init_parser(self.event_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()
345
346 def enableDataTracking(self):
347 self.configuration.tracking = True
348 if hasattr(self, "data"):
349 self.data.enableTracking()
350
351 def disableDataTracking(self):
352 self.configuration.tracking = False
353 if hasattr(self, "data"):
354 self.data.disableTracking()
355
356 def modifyConfigurationVar(self, var, val, default_file, op):
357 if op == "append":
358 self.appendConfigurationVar(var, val, default_file)
359 elif op == "set":
360 self.saveConfigurationVar(var, val, default_file, "=")
361 elif op == "earlyAssign":
362 self.saveConfigurationVar(var, val, default_file, "?=")
363
364
365 def appendConfigurationVar(self, var, val, default_file):
366 #add append var operation to the end of default_file
367 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
368
369 total = "#added by hob"
370 total += "\n%s += \"%s\"\n" % (var, val)
371
372 with open(default_file, 'a') as f:
373 f.write(total)
374
375 #add to history
376 loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
377 self.data.appendVar(var, val, **loginfo)
378
379 def saveConfigurationVar(self, var, val, default_file, op):
380
381 replaced = False
382 #do not save if nothing changed
383 if str(val) == self.data.getVar(var):
384 return
385
386 conf_files = self.data.varhistory.get_variable_files(var)
387
388 #format the value when it is a list
389 if isinstance(val, list):
390 listval = ""
391 for value in val:
392 listval += "%s " % value
393 val = listval
394
395 topdir = self.data.getVar("TOPDIR")
396
397 #comment or replace operations made on var
398 for conf_file in conf_files:
399 if topdir in conf_file:
400 with open(conf_file, 'r') as f:
401 contents = f.readlines()
402
403 lines = self.data.varhistory.get_variable_lines(var, conf_file)
404 for line in lines:
405 total = ""
406 i = 0
407 for c in contents:
408 total += c
409 i = i + 1
410 if i==int(line):
411 end_index = len(total)
412 index = total.rfind(var, 0, end_index)
413
414 begin_line = total.count("\n",0,index)
415 end_line = int(line)
416
417 #check if the variable was saved before in the same way
418 #if true it replace the place where the variable was declared
419 #else it comments it
420 if contents[begin_line-1]== "#added by hob\n":
421 contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
422 replaced = True
423 else:
424 for ii in range(begin_line, end_line):
425 contents[ii] = "#" + contents[ii]
426
427 with open(conf_file, 'w') as f:
428 f.writelines(contents)
429
430 if replaced == False:
431 #remove var from history
432 self.data.varhistory.del_var_history(var)
433
434 #add var to the end of default_file
435 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
436
437 #add the variable on a single line, to be easy to replace the second time
438 total = "\n#added by hob"
439 total += "\n%s %s \"%s\"\n" % (var, op, val)
440
441 with open(default_file, 'a') as f:
442 f.write(total)
443
444 #add to history
445 loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
446 self.data.setVar(var, val, **loginfo)
447
    def removeConfigurationVar(self, var):
        """
        Delete *var* from the on-disk configuration files under TOPDIR and
        from the datastore.  Hob-written assignments (preceded by
        "#added by hob") are blanked together with their marker line;
        other assignments are just blanked.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR")

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate the file up to the recorded line number to
                    # find the character offset of the assignment, then map
                    # that offset back to a starting line number.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                    index = total.rfind(var, 0, end_index)

                    begin_line = total.count("\n",0,index)

                    #check if the variable was saved before in the same way
                    if contents[begin_line-1]== "#added by hob\n":
                        # Blank both the marker comment and the assignment.
                        contents[begin_line-1] = contents[begin_line] = "\n"
                    else:
                        contents[begin_line] = "\n"
                    #remove var from history
                    self.data.varhistory.del_var_history(var, conf_file, line)
                #remove variable
                self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
482
483 def createConfigFile(self, name):
484 path = os.getcwd()
485 confpath = os.path.join(path, "conf", name)
486 open(confpath, 'w').close()
487
    def parseConfiguration(self):
        """
        Apply configuration-derived runtime settings (log verbosity, nice
        level), reset the recipe cache, and set up the layer collections.
        """
        # Set log file verbosity
        # NOTE(review): getVar's second argument is normally the "expand"
        # flag; "0" is a truthy *string* here — presumably a boolean was
        # intended.  Confirm against the datastore getVar signature.
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            # os.nice() is relative: compute the delta from the current level.
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Start from a fresh recipe cache for this parse.
        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
506
507 def updateConfigOpts(self,options):
508 for o in options:
509 setattr(self.configuration, o, options[o])
510
511 def runCommands(self, server, data, abort):
512 """
513 Run any queued asynchronous command
514 This is done by the idle handler so it runs in true context rather than
515 tied to any UI.
516 """
517
518 return self.command.runAsyncCommand()
519
520 def showVersions(self):
521
522 pkg_pn = self.recipecache.pkg_pn
523 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
524
525 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
526 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
527
528 for p in sorted(pkg_pn):
529 pref = preferred_versions[p]
530 latest = latest_versions[p]
531
532 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
533 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
534
535 if pref == latest:
536 prefstr = ""
537
538 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
539
    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-recipe environment: include history, all
        variables/shell functions, and python functions.

        buildfile     -- optional recipe file whose environment to show
        pkgs_to_build -- optional single-element target list; resolved to a
                         recipe via taskdata when buildfile is not given
        NOTE(review): mutable default argument ([]) — only read here, never
        mutated, so harmless, but worth cleaning up.
        """
        fn = None
        envdata = None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)

            # Resolve the target to the recipe file that provides it.
            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            # No recipe requested: show the global configuration environment.
            envdata = self.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
591
592
    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build.

        pkgs_to_build -- list of targets; an entry may carry an explicit task
                         as "target:do_taskname"
        task          -- default task name (None means use the configured cmd)
        abort         -- passed to TaskData: whether unbuildable targets abort

        Returns (taskdata, runlist, fulltargetlist); fires TreeDataPreparation*
        events to report progress.
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        # Work on an expanded copy so the global datastore stays untouched.
        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            if ":do_" in k:
                # "target:do_task" overrides the default task for this entry.
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            runlist.append([k, "do_%s" % ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
625
626 def prepareTreeData(self, pkgs_to_build, task):
627 """
628 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
629 """
630
631 # We set abort to False here to prevent unbuildable targets raising
632 # an exception when we're just generating data
633 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
634
635 return runlist, taskdata
636
637 ######## WARNING : this function requires cache_extra to be enabled ########
638
639 def generateTaskDepTreeData(self, pkgs_to_build, task):
640 """
641 Create a dependency graph of pkgs_to_build including reverse dependency
642 information.
643 """
644 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
645 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
646 rq.rqdata.prepare()
647 return self.buildDependTree(rq, taskdata)
648
649
    def buildDependTree(self, rq, taskdata):
        """
        Flatten a prepared runqueue/taskdata pair into a dependency-tree
        dict with keys: depends, tdepends (task-level), pn (per-recipe
        metadata), rdepends-pn, packages, rdepends-pkg, rrecs-pkg and
        layer-priorities.
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        # The runqueue stores tasks as parallel arrays indexed by task number.
        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            # Record inter-task dependencies as "pn.taskname" dot names.
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            # Per-recipe (rather than per-task) data only needs recording once.
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
728
729 ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        Requires cache_extra to be enabled.  Unlike buildDependTree this
        works from taskdata alone (no runqueue) and resolves build/runtime
        providers to recipe names.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are (re)bound only inside this
                # guard but consumed after it — for repeated pn values the
                # previous iteration's bindings are reused.  Confirm intended.
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    # Map each build-time dependency to the recipe chosen to
                    # provide it, falling back to the raw item name.
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    # Same resolution for runtime dependencies.
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
806
807 def generateDepTreeEvent(self, pkgs_to_build, task):
808 """
809 Create a task dependency graph of pkgs_to_build.
810 Generate an event with the result
811 """
812 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
813 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
814
815 def generateDotGraphFiles(self, pkgs_to_build, task):
816 """
817 Create a task dependency graph of pkgs_to_build.
818 Save the result to a set of .dot files.
819 """
820
821 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
822
823 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
824 depends_file = file('pn-depends.dot', 'w' )
825 buildlist_file = file('pn-buildlist', 'w' )
826 print("digraph depends {", file=depends_file)
827 for pn in depgraph["pn"]:
828 fn = depgraph["pn"][pn]["filename"]
829 version = depgraph["pn"][pn]["version"]
830 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
831 print("%s" % pn, file=buildlist_file)
832 buildlist_file.close()
833 logger.info("PN build list saved to 'pn-buildlist'")
834 for pn in depgraph["depends"]:
835 for depend in depgraph["depends"][pn]:
836 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
837 for pn in depgraph["rdepends-pn"]:
838 for rdepend in depgraph["rdepends-pn"][pn]:
839 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
840 print("}", file=depends_file)
841 logger.info("PN dependencies saved to 'pn-depends.dot'")
842
843 depends_file = file('package-depends.dot', 'w' )
844 print("digraph depends {", file=depends_file)
845 for package in depgraph["packages"]:
846 pn = depgraph["packages"][package]["pn"]
847 fn = depgraph["packages"][package]["filename"]
848 version = depgraph["packages"][package]["version"]
849 if package == pn:
850 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
851 else:
852 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
853 for depend in depgraph["depends"][pn]:
854 print('"%s" -> "%s"' % (package, depend), file=depends_file)
855 for package in depgraph["rdepends-pkg"]:
856 for rdepend in depgraph["rdepends-pkg"][package]:
857 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
858 for package in depgraph["rrecs-pkg"]:
859 for rdepend in depgraph["rrecs-pkg"][package]:
860 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
861 print("}", file=depends_file)
862 logger.info("Package dependencies saved to 'package-depends.dot'")
863
864 tdepends_file = file('task-depends.dot', 'w' )
865 print("digraph depends {", file=tdepends_file)
866 for task in depgraph["tdepends"]:
867 (pn, taskname) = task.rsplit(".", 1)
868 fn = depgraph["pn"][pn]["filename"]
869 version = depgraph["pn"][pn]["version"]
870 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
871 for dep in depgraph["tdepends"][task]:
872 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
873 print("}", file=tdepends_file)
874 logger.info("Task dependencies saved to 'task-depends.dot'")
875
876 def show_appends_with_no_recipes( self ):
877 appends_without_recipes = [self.collection.appendlist[recipe]
878 for recipe in self.collection.appendlist
879 if recipe not in self.collection.appliedappendlist]
880 if appends_without_recipes:
881 appendlines = (' %s' % append
882 for appends in appends_without_recipes
883 for append in appends)
884 msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
885 warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
886 self.data, False) or "no"
887 if warn_only.lower() in ("1", "yes", "true"):
888 bb.warn(msg)
889 else:
890 bb.fatal(msg)
891
892 def handlePrefProviders(self):
893
894 localdata = data.createCopy(self.data)
895 bb.data.update_data(localdata)
896 bb.data.expandKeys(localdata)
897
898 # Handle PREFERRED_PROVIDERS
899 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
900 try:
901 (providee, provider) = p.split(':')
902 except:
903 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
904 continue
905 if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
906 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
907 self.recipecache.preferred[providee] = provider
908
909 def findCoreBaseFiles(self, subdir, configfile):
910 corebase = self.data.getVar('COREBASE', True) or ""
911 paths = []
912 for root, dirs, files in os.walk(corebase + '/' + subdir):
913 for d in dirs:
914 configfilepath = os.path.join(root, d, configfile)
915 if os.path.exists(configfilepath):
916 paths.append(os.path.join(root, d))
917
918 if paths:
919 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
920
921 def findConfigFilePath(self, configfile):
922 """
923 Find the location on disk of configfile and if it exists and was parsed by BitBake
924 emit the ConfigFilePathFound event with the path to the file.
925 """
926 path = bb.cookerdata.findConfigFile(configfile, self.data)
927 if not path:
928 return
929
930 # Generate a list of parsed configuration files by searching the files
931 # listed in the __depends and __base_depends variables with a .conf suffix.
932 conffiles = []
933 dep_files = self.data.getVar('__base_depends') or []
934 dep_files = dep_files + (self.data.getVar('__depends') or [])
935
936 for f in dep_files:
937 if f[0].endswith(".conf"):
938 conffiles.append(f[0])
939
940 _, conf, conffile = path.rpartition("conf/")
941 match = os.path.join(conf, conffile)
942 # Try and find matches for conf/conffilename.conf as we don't always
943 # have the full path to the file.
944 for cfg in conffiles:
945 if cfg.endswith(match):
946 bb.event.fire(bb.event.ConfigFilePathFound(path),
947 self.data)
948 break
949
950 def findFilesMatchingInDir(self, filepattern, directory):
951 """
952 Searches for files matching the regex 'pattern' which are children of
953 'directory' in each BBPATH. i.e. to find all rootfs package classes available
954 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
955 or to find all machine configuration files one could call:
956 findFilesMatchingInDir(self, 'conf/machines', 'conf')
957 """
958
959 matches = []
960 p = re.compile(re.escape(filepattern))
961 bbpaths = self.data.getVar('BBPATH', True).split(':')
962 for path in bbpaths:
963 dirpath = os.path.join(path, directory)
964 if os.path.exists(dirpath):
965 for root, dirs, files in os.walk(dirpath):
966 for f in files:
967 if p.search(f):
968 matches.append(f)
969
970 if matches:
971 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
972
973 def findConfigFiles(self, varname):
974 """
975 Find config files which are appropriate values for varname.
976 i.e. MACHINE, DISTRO
977 """
978 possible = []
979 var = varname.lower()
980
981 data = self.data
982 # iterate configs
983 bbpaths = data.getVar('BBPATH', True).split(':')
984 for path in bbpaths:
985 confpath = os.path.join(path, "conf", var)
986 if os.path.exists(confpath):
987 for root, dirs, files in os.walk(confpath):
988 # get all child files, these are appropriate values
989 for f in files:
990 val, sep, end = f.rpartition('.')
991 if end == 'conf':
992 possible.append(val)
993
994 if possible:
995 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
996
997 def findInheritsClass(self, klass):
998 """
999 Find all recipes which inherit the specified class
1000 """
1001 pkg_list = []
1002
1003 for pfn in self.recipecache.pkg_fn:
1004 inherits = self.recipecache.inherits.get(pfn, None)
1005 if inherits and inherits.count(klass) > 0:
1006 pkg_list.append(self.recipecache.pkg_fn[pfn])
1007
1008 return pkg_list
1009
1010 def generateTargetsTree(self, klass=None, pkgs=[]):
1011 """
1012 Generate a dependency tree of buildable targets
1013 Generate an event with the result
1014 """
1015 # if the caller hasn't specified a pkgs list default to universe
1016 if not len(pkgs):
1017 pkgs = ['universe']
1018 # if inherited_class passed ensure all recipes which inherit the
1019 # specified class are included in pkgs
1020 if klass:
1021 extra_pkgs = self.findInheritsClass(klass)
1022 pkgs = pkgs + extra_pkgs
1023
1024 # generate a dependency tree for all our packages
1025 tree = self.generatePkgDepTreeData(pkgs, 'build')
1026 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1027
    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"

        Adds to recipecache.world_target the PN of every possible_world
        recipe that neither provides a virtual/* target nor shares a
        provide with a different recipe.
        """
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.recipecache.possible_world:
            terminal = True
            pn = self.recipecache.pkg_fn[f]

            for p in self.recipecache.pn_provides[pn]:
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                # Also exclude recipes that share a provide with another recipe
                for pf in self.recipecache.providers[p]:
                    if self.recipecache.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
                # NOTE(review): the inner break exits only the providers loop,
                # so remaining provides are still scanned; harmless since
                # terminal can only ever go from True to False.
            if terminal:
                self.recipecache.world_target.add(pn)
1049
1050 def interactiveMode( self ):
1051 """Drop off into a shell"""
1052 try:
1053 from bb import shell
1054 except ImportError:
1055 parselog.exception("Interactive mode not available")
1056 sys.exit(1)
1057 else:
1058 shell.start( self )
1059
1060
    def handleCollections( self, collections ):
        """
        Handle collections (layers): parse BBFILE_PRIORITY_*, LAYERDEPENDS_*
        and LAYERVERSION_* for each collection in the space-separated
        'collections' string, compute effective priorities, and populate
        recipecache.bbfile_config_priorities with
        (collection, pattern, compiled_regex, priority) tuples.
        Raises CollectionError after reporting any configuration errors.
        """
        errors = False
        self.recipecache.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            # min_prio tracks the lowest explicit priority; 0 means "none
            # seen yet" (an explicit priority of 0 is not expected here)
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # None means "derive from dependencies" below
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    depnamelist = []
                    deplist = deps.split()
                    for dep in deplist:
                        # Dependencies may carry a version: "layer:version"
                        depsplit = dep.split(':')
                        if len(depsplit) > 1:
                            try:
                                depver = int(depsplit[1])
                            except ValueError:
                                parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                                errors = True
                                continue
                        else:
                            depver = None
                        dep = depsplit[0]
                        depnamelist.append(dep)

                        if dep in collection_list:
                            if depver:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                if layerver:
                                    try:
                                        lver = int(layerver)
                                    except ValueError:
                                        # NOTE(review): message names 'c' but the
                                        # variable read was LAYERVERSION_<dep>
                                        parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                        errors = True
                                        continue
                                    if lver != depver:
                                        parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = depnamelist
                else:
                    collection_depends[c] = []

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # Only layers without an explicit priority are calculated:
                # one more than the highest priority among their dependencies
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1158
1159 def buildSetVars(self):
1160 """
1161 Setup any variables needed before starting a build
1162 """
1163 if not self.data.getVar("BUILDNAME"):
1164 self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
1165 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
1166
1167 def matchFiles(self, bf):
1168 """
1169 Find the .bb files which match the expression in 'buildfile'.
1170 """
1171 if bf.startswith("/") or bf.startswith("../"):
1172 bf = os.path.abspath(bf)
1173
1174 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1175 filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
1176 try:
1177 os.stat(bf)
1178 bf = os.path.abspath(bf)
1179 return [bf]
1180 except OSError:
1181 regexp = re.compile(bf)
1182 matches = []
1183 for f in filelist:
1184 if regexp.search(f) and os.path.isfile(f):
1185 matches.append(f)
1186 return matches
1187
1188 def matchFile(self, buildfile):
1189 """
1190 Find the .bb file which matches the expression in 'buildfile'.
1191 Raise an error if multiple files
1192 """
1193 matches = self.matchFiles(buildfile)
1194 if len(matches) != 1:
1195 if matches:
1196 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1197 if matches:
1198 for f in matches:
1199 msg += "\n %s" % f
1200 parselog.error(msg)
1201 else:
1202 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1203 raise NoSpecificMatch
1204 return matches[0]
1205
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile

        Parses the single matching recipe outside the normal cache flow,
        strips its external dependencies, and registers an idle callback
        that drives the runqueue for 'do_<task>'.
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        # Resolve "virtual:<cls>:<file>" notation to the real file first
        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                     self.data,
                                     self.caches_array)
        infos = dict(infos)

        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies so only this recipe is built
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            # Idle handler: pump the runqueue until it finishes or is stopped.
            # Returns False when done, True/float to be called again.

            msg = None
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1300
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Builds the taskdata/runqueue for 'targets' and registers an idle
        callback that drives the runqueue to completion.
        """

        def buildTargetsIdle(server, rq, abort):
            # Idle handler: pump the runqueue until it finishes or is stopped.
            # Note: 'buildname' is bound later in buildTargets, before the
            # callback is first invoked via server_register_idlecallback.
            msg = None
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.buildSetVars()

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        # 'universe' intentionally builds everything, so multiple providers
        # are expected; only warn about them.
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1344
1345
1346 def getAllKeysWithFlags(self, flaglist):
1347 dump = {}
1348 for k in self.data.keys():
1349 try:
1350 v = self.data.getVar(k, True)
1351 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1352 dump[k] = {
1353 'v' : v ,
1354 'history' : self.data.varhistory.variable(k),
1355 }
1356 for d in flaglist:
1357 dump[k][d] = self.data.getVarFlag(k, d)
1358 except Exception as e:
1359 print(e)
1360 return dump
1361
1362
    def generateNewImage(self, image, base_image, package_queue, timestamp, description):
        '''
        Create a new image recipe with a "require"/"inherit" base_image
        statement, an IMAGE_INSTALL list built from package_queue, and the
        given DESCRIPTION. Returns the timestamp suffix when 'timestamp'
        is set, otherwise None.
        '''
        # Pick the destination filename: timestamped, or image (+ ".bb")
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
            dest = image_name + str(timestr) + ".bb"
        else:
            if not image.endswith(".bb"):
                dest = image + ".bb"
            else:
                dest = image

        # Does the base image override IMAGE_BASENAME? If so we reset it below.
        basename = False
        if base_image:
            with open(base_image, 'r') as f:
                # NOTE(review): assumes the require/inherit line of interest
                # is the first line of the base image - confirm with callers
                require_line = f.readline()
                p = re.compile("IMAGE_BASENAME *=")
                for line in f:
                    if p.search(line):
                        basename = True

        with open(dest, "w") as imagefile:
            if base_image is None:
                imagefile.write("inherit core-image\n")
            else:
                # 'require_line' is only bound when base_image is set, which
                # is guaranteed on this branch
                topdir = self.data.getVar("TOPDIR")
                if topdir in base_image:
                    base_image = require_line.split()[1]
                imagefile.write("require " + base_image + "\n")
            image_install = "IMAGE_INSTALL = \""
            for package in package_queue:
                image_install += str(package) + " "
            image_install += "\"\n"
            imagefile.write(image_install)

            description_var = "DESCRIPTION = \"" + description + "\"\n"
            imagefile.write(description_var)

            if basename:
                # If this is overwritten in a inherited image, reset it to default
                image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
                imagefile.write(image_basename)

        self.state = state.initial
        if timestamp:
            return timestr
1411
1412 # This is called for all async commands when self.state != running
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Incrementally bring the recipe cache up to date. Called repeatedly;
        returns True while parsing is still in progress and None once the
        cooker has reached the running state.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
            # Abort an in-flight parse cleanly before giving up
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:

            # reload files for which we got notifications
            for p in self.inotify_modified_files:
                bb.parse.update_cache(p)
            self.inotify_modified_files = []

            if not self.baseconfig_valid:
                logger.debug(1, "Reloading base configuration data")
                self.initConfigurationData()
                self.baseconfig_valid = True
                # Base config changed, so the parse cache must be rebuilt too
                self.parsecache_valid = False

        if self.state != state.parsing and not self.parsecache_valid:
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

            # Config parsing is done: its __depends become the base dependencies
            self.data.renameVar("__depends", "__base_depends")
            self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        if not self.parser.parse_next():
            # Parsing finished: resolve providers/priorities and go live
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
            self.state = state.running
            return None

        return True
1468
1469 def checkPackages(self, pkgs_to_build):
1470
1471 # Return a copy, don't modify the original
1472 pkgs_to_build = pkgs_to_build[:]
1473
1474 if len(pkgs_to_build) == 0:
1475 raise NothingToBuild
1476
1477 ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
1478 for pkg in pkgs_to_build:
1479 if pkg in ignore:
1480 parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1481
1482 if 'world' in pkgs_to_build:
1483 self.buildWorldTargetList()
1484 pkgs_to_build.remove('world')
1485 for t in self.recipecache.world_target:
1486 pkgs_to_build.append(t)
1487
1488 if 'universe' in pkgs_to_build:
1489 parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
1490 parselog.debug(1, "collating packages for \"universe\"")
1491 pkgs_to_build.remove('universe')
1492 for t in self.recipecache.universe_target:
1493 pkgs_to_build.append(t)
1494
1495 return pkgs_to_build
1496
1497
1498
1499
1500 def pre_serve(self):
1501 # Empty the environment. The environment will be populated as
1502 # necessary from the data store.
1503 #bb.utils.empty_environment()
1504 try:
1505 self.prhost = prserv.serv.auto_start(self.data)
1506 except prserv.serv.PRServiceConfigError:
1507 bb.event.fire(CookerExit(), self.event_data)
1508 self.state = state.error
1509 return
1510
    def post_serve(self):
        # Shut the PR server down first, then tell clients the cooker is exiting.
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.event_data)
1514
1515 def shutdown(self, force = False):
1516 if force:
1517 self.state = state.forceshutdown
1518 else:
1519 self.state = state.shutdown
1520
    def finishcommand(self):
        # A command has completed; return the cooker to its idle state.
        self.state = state.initial
1523
    def reset(self):
        # Discard current state and reload the base configuration from scratch.
        self.initConfigurationData()
1526
def server_main(cooker, func, *args):
    """
    Run func(*args) between cooker.pre_serve() and cooker.post_serve(),
    optionally under the profiler when cooker.configuration.profile is set.
    Returns func's return value.
    """
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            # Only fall back to the pure-python profiler when cProfile is
            # genuinely unavailable; the original bare except hid real errors.
            import profile
        prof = profile.Profile()

        # prof.runcall(...) is the idiomatic spelling of
        # profile.Profile.runcall(prof, ...)
        ret = prof.runcall(func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1549
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown

    Carries no payload; the event type itself is the notification.
    """

    def __init__(self):
        bb.event.Event.__init__(self)
1557
1558
class CookerCollectFiles(object):
    """
    Collects the .bb and .bbappend files for a parse run and resolves the
    per-file priorities configured via BBFILE_PATTERN_* / BBFILE_PRIORITY_*.
    """
    def __init__(self, priorities):
        # Map of recipe basename -> list of .bbappend files targeting it
        self.appendlist = {}
        # .bbappend keys which were actually applied to a parsed recipe
        self.appliedappendlist = []
        # List of (collection, pattern, compiled_regex, priority) tuples
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority( self, filename, matched = None ):
        """
        Return the layer priority for 'filename' (0 when no BBFILE_PATTERN
        matches). When 'matched' is a set, the matching regex is added to it
        so callers can detect patterns that never matched anything.
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        bbfiles = []
        for f in os.listdir(path):
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # BUGFIX: str.endswith() requires a str or *tuple* of suffixes;
            # the original passed a list, raising TypeError at runtime.
            found += [os.path.join(dir, f) for f in files
                      if f.endswith(('.bb', '.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """
        Collect all available .bb build files. Returns (bbfiles, masked)
        where 'masked' counts files skipped due to BBMASK. As side effects,
        records .bbappend files in self.appendlist and shadowed recipes in
        self.overlayed.
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                # An invalid BBMASK is ignored rather than fatal
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            if base not in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
                self.appendlist[base].append(f)

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn, supporting a
        trailing '%' wildcard in the append's basename.
        """
        filelist = []
        f = os.path.basename(fn)
        for bbappend in self.appendlist:
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                self.appliedappendlist.append(bbappend)
                for filename in self.appendlist[bbappend]:
                    filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns):
        """
        Return {virtualfn: priority} for every parsed file and warn about
        BBFILE_PATTERN entries which matched neither recipes nor appends.
        """
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        def findmatch(regex):
            # True when the pattern matches at least one known .bbappend
            for bbfile in self.appendlist:
                for append in self.appendlist[bbfile]:
                    if regex.match(append):
                        return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1720
class ParsingFailure(Exception):
    """Wraps an exception raised while parsing a recipe, recording which recipe it was."""

    def __init__(self, realexception, recipe):
        Exception.__init__(self, realexception, recipe)
        # Keep both pieces accessible as attributes for error reporting
        self.realexception = realexception
        self.recipe = recipe
1726
class Feeder(multiprocessing.Process):
    """
    Process that feeds parse jobs from an in-memory list into the
    parsers' job queue, honouring cancellation requests on 'quit'.
    """
    def __init__(self, jobs, to_parsers, quit):
        # jobs: list of pending jobs; to_parsers: queue read by Parser
        # processes; quit: control queue ('cancel' aborts immediately)
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            # Poll for a cancellation message first
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    self.to_parsers.cancel_join_thread()
                    break

            try:
                job = self.jobs.pop()
            except IndexError:
                # All jobs handed off; we are done
                break

            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                # Queue busy: push the job back and re-check quit first
                self.jobs.insert(0, job)
                continue
1755
class Parser(multiprocessing.Process):
    """
    Worker process that pulls recipe-parse jobs from 'jobs', parses them via
    bb.cache.Cache.parse and pushes (True, result-or-exception) tuples onto
    'results'. A message on 'quit' stops the worker.
    """
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        # Optional callable run once at worker startup (see CookerParser.start)
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot context/handlers so each parse starts from a clean slate
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # Entry point in the child process; optionally wraps the real work
        # loop in the profiler.

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            # Always save the profile data, even if realrun raised
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)
            bb.utils.process_profilelog(logfile)
            print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))

    def realrun(self):
        # Main worker loop: poll quit, take a job, parse, deliver the result.
        if self.init:
            self.init()

        # Results that could not be delivered yet (results queue full)
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                # Any message on quit means stop
                self.results.cancel_join_thread()
                break

            if pending:
                # Retry an undelivered result before taking new work
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                # None is the sentinel for "no more jobs"
                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        """Parse one recipe; returns (True, infos) or (True, exception)."""
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
1833
1834class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        """
        Split 'filelist' into cached and to-be-parsed recipes and kick off
        the parser processes (via self.start()).
        """
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        # Worker count: BB_NUMBER_PARSE_THREADS or one per CPU
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Progress events are emitted roughly every 1% of the parse
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False
1869
    def start(self):
        """
        Start the feeder and parser worker processes (only if there is
        anything to parse) and chain cached results with freshly parsed ones.
        """
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                # Runs once inside each worker: share the config datastore and
                # register cache-save hooks for worker exit
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.processes.append(parser)

            # Consumers see cached results first, then live parse results
            self.results = itertools.chain(self.results, self.parse_generator())
1892
    def shutdown(self, clean=True, force=False):
        """
        Stop the feeder/parser processes and flush caches. 'clean' fires a
        ParseCompleted event and lets workers drain; 'force' terminates
        workers that do not exit promptly. Idempotent.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One None sentinel per worker tells it to drain and exit
            for process in self.processes:
                self.jobs.put(None)
        else:
            # 'cancel' makes the feeder abandon the remaining jobs
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Sync the recipe cache to disk in the background
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)
1932
1933 def load_cached(self):
1934 for filename, appends in self.fromcache:
1935 cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
1936 yield not cached, infos
1937
1938 def parse_generator(self):
1939 while True:
1940 if self.parsed >= self.toparse:
1941 break
1942
1943 try:
1944 result = self.result_queue.get(timeout=0.25)
1945 except Queue.Empty:
1946 pass
1947 else:
1948 value = result[1]
1949 if isinstance(value, BaseException):
1950 raise value
1951 else:
1952 yield result
1953
1954 def parse_next(self):
1955 result = []
1956 parsed = None
1957 try:
1958 parsed, result = self.results.next()
1959 except StopIteration:
1960 self.shutdown()
1961 return False
1962 except bb.BBHandledException as exc:
1963 self.error += 1
1964 logger.error('Failed to parse recipe: %s' % exc.recipe)
1965 self.shutdown(clean=False)
1966 return False
1967 except ParsingFailure as exc:
1968 self.error += 1
1969 logger.error('Unable to parse %s: %s' %
1970 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
1971 self.shutdown(clean=False)
1972 return False
1973 except bb.parse.ParseError as exc:
1974 self.error += 1
1975 logger.error(str(exc))
1976 self.shutdown(clean=False)
1977 return False
1978 except bb.data_smart.ExpansionError as exc:
1979 self.error += 1
1980 _, value, _ = sys.exc_info()
1981 logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
1982 self.shutdown(clean=False)
1983 return False
1984 except SyntaxError as exc:
1985 self.error += 1
1986 logger.error('Unable to parse %s', exc.recipe)
1987 self.shutdown(clean=False)
1988 return False
1989 except Exception as exc:
1990 self.error += 1
1991 etype, value, tb = sys.exc_info()
1992 if hasattr(value, "recipe"):
1993 logger.error('Unable to parse %s', value.recipe,
1994 exc_info=(etype, value, exc.traceback))
1995 else:
1996 # Most likely, an exception occurred during raising an exception
1997 import traceback
1998 logger.error('Exception during parse: %s' % traceback.format_exc())
1999 self.shutdown(clean=False)
2000 return False
2001
2002 self.current += 1
2003 self.virtuals += len(result)
2004 if parsed:
2005 self.parsed += 1
2006 if self.parsed % self.progress_chunk == 0:
2007 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2008 self.cfgdata)
2009 else:
2010 self.cached += 1
2011
2012 for virtualfn, info_array in result:
2013 if info_array[0].skipped:
2014 self.skipped += 1
2015 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2016 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
2017 parsed=parsed, watcher = self.cooker.add_filewatch)
2018 return True
2019
2020 def reparse(self, filename):
2021 infos = self.bb_cache.parse(filename,
2022 self.cooker.collection.get_file_appends(filename),
2023 self.cfgdata, self.cooker.caches_array)
2024 for vfn, info_array in infos:
2025 self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)