author     Tudor Florea <tudor.florea@enea.com>  2014-10-16 03:05:19 +0200
committer  Tudor Florea <tudor.florea@enea.com>  2014-10-16 03:05:19 +0200
commit     c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch)
tree       bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /bitbake/lib/bb/cooker.py
download   poky-c527fd1f14c27855a37f2e8ac5346ce8d940ced2.tar.gz
initial commit for Enea Linux 4.0-140929daisy-140929
Migrated from the internal git server on the daisy-enea-point-release branch
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'bitbake/lib/bb/cooker.py')
 -rw-r--r--  bitbake/lib/bb/cooker.py  1874
 1 file changed, 1874 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
new file mode 100644
index 0000000000..f44a08889a
--- /dev/null
+++ b/bitbake/lib/bb/cooker.py
@@ -0,0 +1,1874 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import print_function
import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import sre_constants
import threading
from cStringIO import StringIO
from contextlib import closing
from functools import wraps
from collections import defaultdict
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
import Queue
import signal
import prserv.serv

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no or multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class CollectionError(bb.BBHandledException):
    """
    Exception raised when layer configuration is incorrect
    """

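# Minimal state machine for the cooker: the class below serves as a plain
# enumeration, so only the distinctness of the seven values matters.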
class state:
    initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)


class SkippedPackage:
    def __init__(self, info = None, reason = None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            self.pn = info.pn
            self.skipreason = info.skipreason
            self.provides = info.provides
            self.rprovides = info.rprovides
        elif reason:
            self.skipreason = reason


class CookerFeatures(object):
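    # Note: the chained assignment below gives each feature name a small
    # integer id (0-3) and simultaneously binds _feature_list to the list of
    # all ids, which setFeature() uses to validate requests.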
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        return self._features.next()


#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet = []):
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        for f in featureSet:
            self.featureset.setFeature(f)

        self.configuration = configuration

        self.initConfigurationData()

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
        self.lock = bb.utils.lockfile(lockfile, False, False)
        if not self.lock:
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            self.lock.seek(0)
            self.lock.truncate()
            if len(configuration.interface) >= 2:
                self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]))
            self.lock.flush()
        except:
            pass

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)

    def sigterm_exception(self, signum, stackframe):
        bb.warn("Cooker received SIGTERM, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if self.state != state.initial:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)):
            self.reset()

    def initConfigurationData(self):

        self.state = state.initial
        self.caches_array = []

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        #
        # Special updated configuration we use for firing events
        #
        self.event_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.event_data)
        bb.parse.init_parser(self.event_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()

    def enableDataTracking(self):
        self.configuration.tracking = True
        if hasattr(self, "data"):
            self.data.enableTracking()

    def disableDataTracking(self):
        self.configuration.tracking = False
        if hasattr(self, "data"):
            self.data.disableTracking()

    def modifyConfigurationVar(self, var, val, default_file, op):
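        # Dispatch on the UI-requested operation: "set" writes VAR = "val",
        # "earlyAssign" writes VAR ?= "val", and "append" appends with +=.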
        if op == "append":
            self.appendConfigurationVar(var, val, default_file)
        elif op == "set":
            self.saveConfigurationVar(var, val, default_file, "=")
        elif op == "earlyAssign":
            self.saveConfigurationVar(var, val, default_file, "?=")


    def appendConfigurationVar(self, var, val, default_file):
        #add append var operation to the end of default_file
        default_file = bb.cookerdata.findConfigFile(default_file, self.data)

        total = "#added by hob"
        total += "\n%s += \"%s\"\n" % (var, val)

        with open(default_file, 'a') as f:
            f.write(total)

        #add to history
        loginfo = {"op": "append", "file": default_file, "line": total.count("\n")}
        self.data.appendVar(var, val, **loginfo)

    def saveConfigurationVar(self, var, val, default_file, op):

        replaced = False
        #do not save if nothing changed
        if str(val) == self.data.getVar(var):
            return

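        # varhistory knows every conf file (and line) where this variable was
        # assigned; those earlier assignments are replaced or commented out below.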
        conf_files = self.data.varhistory.get_variable_files(var)

        #format the value when it is a list
        if isinstance(val, list):
            listval = ""
            for value in val:
                listval += "%s " % value
            val = listval

        topdir = self.data.getVar("TOPDIR")

        #comment or replace operations made on var
        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)
                            end_line = int(line)

                            #check if the variable was saved before in the same way
                            #if true, replace the line where it was declared
                            #else, comment it out
                            if contents[begin_line-1] == "#added by hob\n":
                                contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
                                replaced = True
                            else:
                                for ii in range(begin_line, end_line):
                                    contents[ii] = "#" + contents[ii]

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

        if replaced == False:
            #remove var from history
            self.data.varhistory.del_var_history(var)

            #add var to the end of default_file
            default_file = bb.cookerdata.findConfigFile(default_file, self.data)

            #add the variable on a single line, to be easy to replace the second time
            total = "\n#added by hob"
            total += "\n%s %s \"%s\"\n" % (var, op, val)

            with open(default_file, 'a') as f:
                f.write(total)

            #add to history
            loginfo = {"op": "set", "file": default_file, "line": total.count("\n")}
            self.data.setVar(var, val, **loginfo)

    def removeConfigurationVar(self, var):
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR")

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)

                            #check if the variable was saved before in the same way
                            if contents[begin_line-1] == "#added by hob\n":
                                contents[begin_line-1] = contents[begin_line] = "\n"
                            else:
                                contents[begin_line] = "\n"
                            #remove var from history
                            self.data.varhistory.del_var_history(var, conf_file, line)
                            #remove variable
                            self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)

    def createConfigFile(self, name):
        path = os.getcwd()
        confpath = os.path.join(path, "conf", name)
        open(confpath, 'w').close()

    def parseConfiguration(self):
        # Set log file verbosity
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        pkg_pn = self.recipecache.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")

        for p in sorted(pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-package environment
        """
        fn = None
        envdata = None

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
            # this showEnvironment() code path doesn't use the cache
            self.parseConfiguration()

            fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Display history
        with closing(StringIO()) as env:
            self.data.inchistory.emit(env)
            logger.plain(env.getvalue())

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))


    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
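            # Targets may be qualified as "<recipe>:do_<task>" (e.g.
            # "busybox:do_fetch"); peel off the explicit task so it overrides
            # the default task for this entry.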
477 | if ":do_" in k: | ||
478 | k2 = k.split(":do_") | ||
479 | k = k2[0] | ||
480 | ktask = k2[1] | ||
481 | taskdata.add_provider(localdata, self.recipecache, k) | ||
482 | current += 1 | ||
483 | runlist.append([k, "do_%s" % ktask]) | ||
484 | bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data) | ||
485 | taskdata.add_unresolved(localdata, self.recipecache) | ||
486 | bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) | ||
487 | return taskdata, runlist, fulltargetlist | ||
488 | |||
489 | def prepareTreeData(self, pkgs_to_build, task): | ||
490 | """ | ||
491 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | ||
492 | """ | ||
493 | |||
494 | # We set abort to False here to prevent unbuildable targets raising | ||
495 | # an exception when we're just generating data | ||
496 | taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False) | ||
497 | |||
498 | return runlist, taskdata | ||
499 | |||
500 | ######## WARNING : this function requires cache_extra to be enabled ######## | ||
501 | |||
502 | def generateTaskDepTreeData(self, pkgs_to_build, task): | ||
503 | """ | ||
504 | Create a dependency graph of pkgs_to_build including reverse dependency | ||
505 | information. | ||
506 | """ | ||
507 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) | ||
508 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist) | ||
509 | rq.rqdata.prepare() | ||
510 | return self.buildDependTree(rq, taskdata) | ||
511 | |||
512 | |||
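    # buildDependTree() flattens runqueue/taskdata state into a plain dict;
    # the shape, roughly, is:
    #   depend_tree["pn"][pn]      -> {"filename": ..., "version": ..., "inherits": ...}
    #   depend_tree["tdepends"][t] -> ["<pn>.<task>", ...] inter-task edges
    #   depend_tree["depends"] / "rdepends-pn" / "rdepends-pkg" / "rrecs-pkg"
    #                              -> build-time and runtime dependency lists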
    def buildDependTree(self, rq, taskdata):
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

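        # rq.rqdata stores parallel arrays indexed by runqueue task id:
        # runq_task[i] is the task name and runq_fnid[i] the recipe file id.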
        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
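            # pkg_pepvpr maps the recipe file to its (PE, PV, PR) tuple,
            # rendered here as "epoch:version-revision".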
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)

        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
        depends_file = file('pn-depends.dot', 'w' )
        buildlist_file = file('pn-buildlist', 'w' )
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            print("%s" % pn, file=buildlist_file)
        buildlist_file.close()
        logger.info("PN build list saved to 'pn-buildlist'")
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
        for pn in depgraph["rdepends-pn"]:
            for rdepend in depgraph["rdepends-pn"][pn]:
                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("PN dependencies saved to 'pn-depends.dot'")

        depends_file = file('package-depends.dot', 'w' )
        print("digraph depends {", file=depends_file)
        for package in depgraph["packages"]:
            pn = depgraph["packages"][package]["pn"]
            fn = depgraph["packages"][package]["filename"]
            version = depgraph["packages"][package]["version"]
            if package == pn:
                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            else:
                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (package, depend), file=depends_file)
        for package in depgraph["rdepends-pkg"]:
            for rdepend in depgraph["rdepends-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        for package in depgraph["rrecs-pkg"]:
            for rdepend in depgraph["rrecs-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("Package dependencies saved to 'package-depends.dot'")

        tdepends_file = file('task-depends.dot', 'w' )
        print("digraph depends {", file=tdepends_file)
        for task in depgraph["tdepends"]:
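            # Keys in "tdepends" look like "<pn>.<taskname>"; rsplit on the
            # last dot so package names containing dots (e.g. "glib-2.0")
            # stay intact.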
            (pn, taskname) = task.rsplit(".", 1)
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
            for dep in depgraph["tdepends"][task]:
                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
        print("}", file=tdepends_file)
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes( self ):
        appends_without_recipes = [self.collection.appendlist[recipe]
                                   for recipe in self.collection.appendlist
                                   if recipe not in self.collection.appliedappendlist]
        if appends_without_recipes:
            appendlines = ('  %s' % append
                           for appends in appends_without_recipes
                           for append in appends)
            msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
            warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 self.data, False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)

    def handlePrefProviders(self):

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        # Handle PREFERRED_PROVIDERS
        for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
            try:
                (providee, provider) = p.split(':')
            except:
                providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                continue
            if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
                providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
            self.recipecache.preferred[providee] = provider

    def findCoreBaseFiles(self, subdir, configfile):
        corebase = self.data.getVar('COREBASE', True) or ""
        paths = []
        for root, dirs, files in os.walk(corebase + '/' + subdir):
            for d in dirs:
                configfilepath = os.path.join(root, d, configfile)
                if os.path.exists(configfilepath):
                    paths.append(os.path.join(root, d))

        if paths:
            bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)

    def findConfigFilePath(self, configfile):
        """
        Find the location on disk of configfile and if it exists and was parsed by BitBake
        emit the ConfigFilePathFound event with the path to the file.
        """
        path = bb.cookerdata.findConfigFile(configfile, self.data)
        if not path:
            return

        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends') or []
        dep_files = dep_files + (self.data.getVar('__depends') or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
                conffiles.append(f[0])

        _, conf, conffile = path.rpartition("conf/")
        match = os.path.join(conf, conffile)
        # Try and find matches for conf/conffilename.conf as we don't always
        # have the full path to the file.
        for cfg in conffiles:
            if cfg.endswith(match):
                bb.event.fire(bb.event.ConfigFilePathFound(path),
                              self.data)
                break

    def findFilesMatchingInDir(self, filepattern, directory):
        """
        Searches for files matching the regex 'pattern' which are children of
        'directory' in each BBPATH. i.e. to find all rootfs package classes available
        to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """
        import re

        matches = []
        p = re.compile(re.escape(filepattern))
        bbpaths = self.data.getVar('BBPATH', True).split(':')
        for path in bbpaths:
            dirpath = os.path.join(path, directory)
            if os.path.exists(dirpath):
                for root, dirs, files in os.walk(dirpath):
                    for f in files:
                        if p.search(f):
                            matches.append(f)

        if matches:
            bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.data
        # iterate configs
        bbpaths = data.getVar('BBPATH', True).split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        if possible:
            bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.recipecache.pkg_fn:
            inherits = self.recipecache.inherits.get(pfn, None)
            if inherits and inherits.count(klass) > 0:
                pkg_list.append(self.recipecache.pkg_fn[pfn])

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=[]):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not len(pkgs):
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generatePkgDepTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)

    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"
        """
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.recipecache.possible_world:
            terminal = True
            pn = self.recipecache.pkg_fn[f]

            for p in self.recipecache.pn_provides[pn]:
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                for pf in self.recipecache.providers[p]:
                    if self.recipecache.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
            if terminal:
                self.recipecache.world_target.add(pn)

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )


    def handleCollections( self, collections ):
        """Handle collections"""
        errors = False
        self.recipecache.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                        if min_prio == 0 or prio < min_prio:
                            min_prio = prio
                        collection_priorities[c] = prio
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                        # Fall back to an auto-calculated priority so the error
                        # path doesn't reference an unset 'prio'
                        collection_priorities[c] = None
                else:
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    depnamelist = []
                    deplist = deps.split()
                    for dep in deplist:
                        depsplit = dep.split(':')
                        if len(depsplit) > 1:
                            try:
                                depver = int(depsplit[1])
                            except ValueError:
                                parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                                errors = True
                                continue
                        else:
                            depver = None
                        dep = depsplit[0]
                        depnamelist.append(dep)

                        if dep in collection_list:
                            if depver:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                if layerver:
                                    try:
                                        lver = int(layerver)
                                    except ValueError:
                                        parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                        errors = True
                                        continue
                                    if lver != depver:
                                        parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = depnamelist
                else:
                    collection_depends[c] = []

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        if not self.data.getVar("BUILDNAME"):
            self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
        self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))

    def matchFiles(self, bf):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)

        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
        filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
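            # Not a literal file on disk: treat the argument as a regular
            # expression and match it against every known recipe path.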
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            if matches:
                msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
                for f in matches:
                    msg += "\n    %s" % f
                parselog.error(msg)
            else:
                parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
            raise NoSpecificMatch
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

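        # A "virtual" filename (e.g. "virtual:native:/path/to/foo.bb") carries
        # a class extension; strip it to locate the file, re-apply it after.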
        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                                     self.data,
                                     self.caches_array)
        infos = dict(infos)

        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
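            # Idle-callback contract: return True to be called again, False
            # when the command has finished; other return values are passed
            # straight back to the server's idle loop.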

            msg = None
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        def buildTargetsIdle(server, rq, abort):
            msg = None
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.buildSetVars()

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)


    def getAllKeysWithFlags(self, flaglist):
        dump = {}
        for k in self.data.keys():
            try:
                v = self.data.getVar(k, True)
                if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
                    dump[k] = {
                        'v' : v ,
                        'history' : self.data.varhistory.variable(k),
                    }
                    for d in flaglist:
                        dump[k][d] = self.data.getVarFlag(k, d)
            except Exception as e:
                print(e)
        return dump

    def generateNewImage(self, image, base_image, package_queue, timestamp, description):
        '''
        Create a new image with a "require"/"inherit" base_image statement
        '''
        import re
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
            dest = image_name + str(timestr) + ".bb"
        else:
            if not image.endswith(".bb"):
                dest = image + ".bb"
            else:
                dest = image

        basename = False
        if base_image:
            with open(base_image, 'r') as f:
                require_line = f.readline()
                p = re.compile("IMAGE_BASENAME *=")
                for line in f:
                    if p.search(line):
                        basename = True

        with open(dest, "w") as imagefile:
            if base_image is None:
                imagefile.write("inherit core-image\n")
            else:
                topdir = self.data.getVar("TOPDIR")
                if topdir in base_image:
                    base_image = require_line.split()[1]
                imagefile.write("require " + base_image + "\n")
            image_install = "IMAGE_INSTALL = \""
            for package in package_queue:
                image_install += str(package) + " "
            image_install += "\"\n"
            imagefile.write(image_install)

            description_var = "DESCRIPTION = \"" + description + "\"\n"
            imagefile.write(description_var)

            if basename:
                # If this is overwritten in an inherited image, reset it to default
                image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
                imagefile.write(image_basename)

        self.state = state.initial
        if timestamp:
            return timestr

    # This is called for all async commands when self.state != running
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

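            # Snapshot the base configuration's file dependencies under a
            # separate name so recipe parsing can build its own __depends list.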
1301 | self.data.renameVar("__depends", "__base_depends") | ||
1302 | |||
1303 | self.parser = CookerParser(self, filelist, masked) | ||
1304 | self.state = state.parsing | ||
1305 | |||
1306 | if not self.parser.parse_next(): | ||
1307 | collectlog.debug(1, "parsing complete") | ||
1308 | if self.parser.error: | ||
1309 | raise bb.BBHandledException() | ||
1310 | self.show_appends_with_no_recipes() | ||
1311 | self.handlePrefProviders() | ||
1312 | self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn) | ||
1313 | self.state = state.running | ||
1314 | return None | ||
1315 | |||
1316 | return True | ||
1317 | |||
1318 | def checkPackages(self, pkgs_to_build): | ||
1319 | |||
1320 | # Return a copy, don't modify the original | ||
1321 | pkgs_to_build = pkgs_to_build[:] | ||
1322 | |||
1323 | if len(pkgs_to_build) == 0: | ||
1324 | raise NothingToBuild | ||
1325 | |||
1326 | ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split() | ||
1327 | for pkg in pkgs_to_build: | ||
1328 | if pkg in ignore: | ||
1329 | parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg) | ||
1330 | |||
1331 | if 'world' in pkgs_to_build: | ||
1332 | self.buildWorldTargetList() | ||
1333 | pkgs_to_build.remove('world') | ||
1334 | for t in self.recipecache.world_target: | ||
1335 | pkgs_to_build.append(t) | ||
1336 | |||
1337 | if 'universe' in pkgs_to_build: | ||
1338 | parselog.warn("The \"universe\" target is only intended for testing and may produce errors.") | ||
1339 | parselog.debug(1, "collating packages for \"universe\"") | ||
1340 | pkgs_to_build.remove('universe') | ||
1341 | for t in self.recipecache.universe_target: | ||
1342 | pkgs_to_build.append(t) | ||
1343 | |||
1344 | return pkgs_to_build | ||
1345 | |||
1346 | |||
1347 | |||
1348 | |||
1349 | def pre_serve(self): | ||
1350 | # Empty the environment. The environment will be populated as | ||
1351 | # necessary from the data store. | ||
1352 | #bb.utils.empty_environment() | ||
1353 | try: | ||
1354 | self.prhost = prserv.serv.auto_start(self.data) | ||
1355 | except prserv.serv.PRServiceConfigError: | ||
1356 | bb.event.fire(CookerExit(), self.event_data) | ||
1357 | self.state = state.error | ||
1358 | return | ||
1359 | |||
1360 | def post_serve(self): | ||
1361 | prserv.serv.auto_shutdown(self.data) | ||
1362 | bb.event.fire(CookerExit(), self.event_data) | ||
1363 | |||
    def shutdown(self, force=False):
        if force:
            self.state = state.forceshutdown
        else:
            self.state = state.shutdown

    def finishcommand(self):
        self.state = state.initial

    def reset(self):
        self.initConfigurationData()

def server_main(cooker, func, *args):
    cooker.pre_serve()

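    # When profiling is enabled, run the server function under a profiler
    # and post-process the statistics on exit; otherwise just call it.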
    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret

class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)


class CookerCollectFiles(object):
    def __init__(self, priorities):
        self.appendlist = {}
        self.appliedappendlist = []
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority(self, filename, matched=None):
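        # The first configured collection regex that matches wins. Matching
        # regexes are recorded in 'matched' so collection_priorities() can
        # warn about BBFILE_PATTERN entries that never matched anything.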
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    if regex not in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by scanning the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dirpath, dirs, files in os.walk(path):
            # Prune version control metadata directories so they are not scanned
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [os.path.join(dirpath, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files"""
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar("BBFILES", True) or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem))

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            if base not in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
                self.appendlist[base].append(f)

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
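        # (the list was sorted by ascending priority above, so iterating in
        # reverse sees the highest-priority file for each basename first)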
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
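        # A '%' in a .bbappend name acts as a wildcard: everything before the
        # '%' must be a prefix of the recipe file name. For example (made-up
        # names), 'foo_1.%.bbappend' would apply to 'foo_1.2.bb'.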
        filelist = []
        f = os.path.basename(fn)
        for bbappend in self.appendlist:
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                self.appliedappendlist.append(bbappend)
                for filename in self.appendlist[bbappend]:
                    filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns):

        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        def findmatch(regex):
            for bbfile in self.appendlist:
                for append in self.appendlist[bbfile]:
                    if regex.match(append):
                        return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities

class ParsingFailure(Exception):
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        Exception.__init__(self, realexception, recipe)

class Feeder(multiprocessing.Process):
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                break

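            # The parsers may all be busy, so the queue can be full; put the
            # job back at the other end of the list so the remaining jobs are
            # tried first, then loop around and retry.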
            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                self.jobs.insert(0, job)
                continue

class Parser(multiprocessing.Process):
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
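        # Snapshot the metadata context and event handlers; parse() restores
        # these copies so every recipe is parsed from the same initial state.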
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)
            bb.utils.process_profilelog(logfile)
            print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))

    def realrun(self):
        if self.init:
            self.init()

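        # 'pending' holds a parsed result that could not be written to the
        # results queue yet; it is retried before a new job is taken.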
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                if job is None:
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we shut down
        # gracefully rather than having a worker process exit on its own in
        # response to, for example, a SystemExit event.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)

class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

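        # Split the file list into recipes whose cache entries are still
        # valid and those that need to be (re)parsed.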
        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            def init():
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

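            # Bounded queues: one quit slot for the feeder, one per parser
            # process, and a job queue sized to the parser count so the
            # feeder hands out work gradually.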
            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.processes.append(parser)

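            # Consume results lazily: cached entries first, then whatever the
            # parser processes deliver.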
            self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

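        # Sync the cache back to disk in a background thread, making sure it
        # completes before the interpreter exits.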
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)

    def load_cached(self):
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
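        # Yield (parsed, infos) tuples coming back from the worker processes.
        # Exception objects sent back by a worker are re-raised here, in the
        # server process, where parse_next() can handle them.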
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s', value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred while raising another exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

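        # Accounting: one file handled; each entry in 'result' is a virtual
        # variant of the recipe (e.g. BBCLASSEXTEND variants).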
        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                   parsed=parsed)
        return True

    def reparse(self, filename):
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)