diff options
Diffstat (limited to 'bitbake/lib/bb/cooker.py')
-rw-r--r-- | bitbake/lib/bb/cooker.py | 1838 |
1 files changed, 1838 insertions, 0 deletions
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py new file mode 100644 index 0000000000..ad36b34aa4 --- /dev/null +++ b/bitbake/lib/bb/cooker.py | |||
@@ -0,0 +1,1838 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | # | ||
5 | # Copyright (C) 2003, 2004 Chris Larson | ||
6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
7 | # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer | ||
8 | # Copyright (C) 2005 Holger Hans Peter Freyther | ||
9 | # Copyright (C) 2005 ROAD GmbH | ||
10 | # Copyright (C) 2006 - 2007 Richard Purdie | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | |||
25 | from __future__ import print_function | ||
26 | import sys, os, glob, os.path, re, time | ||
27 | import atexit | ||
28 | import itertools | ||
29 | import logging | ||
30 | import multiprocessing | ||
31 | import sre_constants | ||
32 | import threading | ||
33 | from cStringIO import StringIO | ||
34 | from contextlib import closing | ||
35 | from functools import wraps | ||
36 | from collections import defaultdict | ||
37 | import bb, bb.exceptions, bb.command | ||
38 | from bb import utils, data, parse, event, cache, providers, taskdata, runqueue | ||
39 | import Queue | ||
40 | import prserv.serv | ||
41 | |||
42 | logger = logging.getLogger("BitBake") | ||
43 | collectlog = logging.getLogger("BitBake.Collection") | ||
44 | buildlog = logging.getLogger("BitBake.Build") | ||
45 | parselog = logging.getLogger("BitBake.Parsing") | ||
46 | providerlog = logging.getLogger("BitBake.Provider") | ||
47 | |||
class NoSpecificMatch(bb.BBHandledException):
    """
    Raised when a recipe file pattern matches either no file at all or
    more than one file, so no single recipe can be selected.
    """
52 | |||
class NothingToBuild(Exception):
    """
    Raised when the requested targets leave BitBake with no work to do.
    """
57 | |||
class CollectionError(bb.BBHandledException):
    """
    Raised when the layer collection configuration (BBFILE_COLLECTIONS
    and related variables) is inconsistent or incorrect.
    """
62 | |||
class state:
    """Integer constants naming the cooker server's lifecycle states."""
    initial = 0
    parsing = 1
    running = 2
    shutdown = 3
    forceshutdown = 4
    stopped = 5
65 | |||
66 | |||
class SkippedPackage:
    """Lightweight record describing a recipe skipped during parsing."""

    def __init__(self, info = None, reason = None):
        """
        Populate either from a recipe info object (preferred) or from a
        bare reason string.  With neither argument, all fields stay None.
        """
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the interesting fields straight off the info object.
            for attr in ("pn", "skipreason", "provides", "rprovides"):
                setattr(self, attr, getattr(info, attr))
        elif reason:
            self.skipreason = reason
81 | |||
82 | |||
class CookerFeatures(object):
    """
    Tracks the set of optional cooker features a UI has requested.

    Feature identifiers are small integers; the known ones are listed in
    _feature_list so that unknown requests can be ignored safely.
    """
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE] = range(2)

    def __init__(self):
        self._features=set()

    def setFeature(self, f):
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        # BUGFIX: sets have no next() method, so the previous
        # "self._features.next()" always raised AttributeError.  Delegate
        # through iter() instead; next(iter(...)) works on Python 2 and 3
        # and raises StopIteration when the set is empty.
        return next(iter(self._features))
103 | |||
104 | |||
105 | #============================================================================# | ||
106 | # BBCooker | ||
107 | #============================================================================# | ||
108 | class BBCooker: | ||
109 | """ | ||
110 | Manages one bitbake build run | ||
111 | """ | ||
112 | |||
    def __init__(self, configuration):
        """
        Set up a cooker for one build run: load the base configuration,
        take the per-build-directory lock and prepare terminal and
        command-handling state.

        configuration: the parsed settings object for this run.
        """
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()

        self.configuration = configuration

        # Parses the base configuration and populates self.data.
        self.loadConfigurationData()

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
        self.lock = bb.utils.lockfile(lockfile, False, False)
        if not self.lock:
            bb.fatal("Only one copy of bitbake should be run against a build directory")

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        # Asynchronous command interface used by the UIs.
        self.command = bb.command.Command(self)
        self.state = state.initial

        # Recipe parser; created later when parsing actually starts.
        self.parser = None
143 | |||
    def initConfigurationData(self):
        """
        Reset the cooker to its initial state, (re)build the list of
        recipe-info cache classes, and create a fresh configuration data
        builder plus datastore.
        """
        self.state = state.initial

        self.caches_array = []

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Entries are "module:classname"; import the module and pull
                # the named RecipeInfo class out of it.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.data = self.databuilder.data
170 | |||
171 | def enableDataTracking(self): | ||
172 | self.configuration.tracking = True | ||
173 | self.data.enableTracking() | ||
174 | |||
175 | def disableDataTracking(self): | ||
176 | self.configuration.tracking = False | ||
177 | self.data.disableTracking() | ||
178 | |||
    def loadConfigurationData(self):
        """
        Parse the base configuration (bitbake.conf and friends) into
        self.data, and build a separate expanded copy that is used when
        firing events.
        """
        self.initConfigurationData()
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        #
        # Special updated configuration we use for firing events
        #
        self.event_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.event_data)
        bb.parse.init_parser(self.event_data)
191 | |||
192 | def modifyConfigurationVar(self, var, val, default_file, op): | ||
193 | if op == "append": | ||
194 | self.appendConfigurationVar(var, val, default_file) | ||
195 | elif op == "set": | ||
196 | self.saveConfigurationVar(var, val, default_file, "=") | ||
197 | elif op == "earlyAssign": | ||
198 | self.saveConfigurationVar(var, val, default_file, "?=") | ||
199 | |||
200 | |||
201 | def appendConfigurationVar(self, var, val, default_file): | ||
202 | #add append var operation to the end of default_file | ||
203 | default_file = bb.cookerdata.findConfigFile(default_file, self.data) | ||
204 | |||
205 | with open(default_file, 'r') as f: | ||
206 | contents = f.readlines() | ||
207 | f.close() | ||
208 | |||
209 | total = "" | ||
210 | for c in contents: | ||
211 | total += c | ||
212 | |||
213 | total += "#added by hob" | ||
214 | total += "\n%s += \"%s\"\n" % (var, val) | ||
215 | |||
216 | with open(default_file, 'w') as f: | ||
217 | f.write(total) | ||
218 | f.close() | ||
219 | |||
220 | #add to history | ||
221 | loginfo = {"op":append, "file":default_file, "line":total.count("\n")} | ||
222 | self.data.appendVar(var, val, **loginfo) | ||
223 | |||
224 | def saveConfigurationVar(self, var, val, default_file, op): | ||
225 | |||
226 | replaced = False | ||
227 | #do not save if nothing changed | ||
228 | if str(val) == self.data.getVar(var): | ||
229 | return | ||
230 | |||
231 | conf_files = self.data.varhistory.get_variable_files(var) | ||
232 | |||
233 | #format the value when it is a list | ||
234 | if isinstance(val, list): | ||
235 | listval = "" | ||
236 | for value in val: | ||
237 | listval += "%s " % value | ||
238 | val = listval | ||
239 | |||
240 | topdir = self.data.getVar("TOPDIR") | ||
241 | |||
242 | #comment or replace operations made on var | ||
243 | for conf_file in conf_files: | ||
244 | if topdir in conf_file: | ||
245 | with open(conf_file, 'r') as f: | ||
246 | contents = f.readlines() | ||
247 | f.close() | ||
248 | |||
249 | lines = self.data.varhistory.get_variable_lines(var, conf_file) | ||
250 | for line in lines: | ||
251 | total = "" | ||
252 | i = 0 | ||
253 | for c in contents: | ||
254 | total += c | ||
255 | i = i + 1 | ||
256 | if i==int(line): | ||
257 | end_index = len(total) | ||
258 | index = total.rfind(var, 0, end_index) | ||
259 | |||
260 | begin_line = total.count("\n",0,index) | ||
261 | end_line = int(line) | ||
262 | |||
263 | #check if the variable was saved before in the same way | ||
264 | #if true it replace the place where the variable was declared | ||
265 | #else it comments it | ||
266 | if contents[begin_line-1]== "#added by hob\n": | ||
267 | contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val) | ||
268 | replaced = True | ||
269 | else: | ||
270 | for ii in range(begin_line, end_line): | ||
271 | contents[ii] = "#" + contents[ii] | ||
272 | |||
273 | total = "" | ||
274 | for c in contents: | ||
275 | total += c | ||
276 | with open(conf_file, 'w') as f: | ||
277 | f.write(total) | ||
278 | f.close() | ||
279 | |||
280 | if replaced == False: | ||
281 | #remove var from history | ||
282 | self.data.varhistory.del_var_history(var) | ||
283 | |||
284 | #add var to the end of default_file | ||
285 | default_file = bb.cookerdata.findConfigFile(default_file, self.data) | ||
286 | |||
287 | with open(default_file, 'r') as f: | ||
288 | contents = f.readlines() | ||
289 | f.close() | ||
290 | |||
291 | total = "" | ||
292 | for c in contents: | ||
293 | total += c | ||
294 | |||
295 | #add the variable on a single line, to be easy to replace the second time | ||
296 | total += "\n#added by hob" | ||
297 | total += "\n%s %s \"%s\"\n" % (var, op, val) | ||
298 | |||
299 | with open(default_file, 'w') as f: | ||
300 | f.write(total) | ||
301 | f.close() | ||
302 | |||
303 | #add to history | ||
304 | loginfo = {"op":set, "file":default_file, "line":total.count("\n")} | ||
305 | self.data.setVar(var, val, **loginfo) | ||
306 | |||
    def removeConfigurationVar(self, var):
        """
        Blank out every assignment of *var* in conf files under TOPDIR and
        drop the corresponding variable-history records.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR")

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()
                    f.close()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Walk the file up to the recorded 1-based line number so
                    # the assignment's position in the text can be located.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)

                            #check if the variable was saved before in the same way
                            # (entries we wrote carry a "#added by hob" marker
                            # on the preceding line; blank that out too)
                            if contents[begin_line-1]== "#added by hob\n":
                                contents[begin_line-1] = contents[begin_line] = "\n"
                            else:
                                contents[begin_line] = "\n"
                            #remove var from history
                            self.data.varhistory.del_var_history(var, conf_file, line)

                total = ""
                for c in contents:
                    total += c
                with open(conf_file, 'w') as f:
                    f.write(total)
                    f.close()
344 | |||
345 | def createConfigFile(self, name): | ||
346 | path = os.getcwd() | ||
347 | confpath = os.path.join(path, "conf", name) | ||
348 | open(confpath, 'w').close() | ||
349 | |||
    def parseConfiguration(self):
        """
        Apply post-parse configuration: log verbosity, process nice level,
        a fresh recipe cache and the layer collections.
        """
        # Set log file verbosity
        # NOTE(review): getVar's second argument is normally the expand
        # flag; passing "0" here looks like it was meant as a default
        # value — confirm against bb.data_smart's getVar signature.
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            # os.nice() is relative, so convert the requested absolute
            # level into a delta from the current one.
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Start every (re)parse with an empty recipe cache.
        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
369 | |||
370 | def runCommands(self, server, data, abort): | ||
371 | """ | ||
372 | Run any queued asynchronous command | ||
373 | This is done by the idle handler so it runs in true context rather than | ||
374 | tied to any UI. | ||
375 | """ | ||
376 | |||
377 | return self.command.runAsyncCommand() | ||
378 | |||
379 | def showVersions(self): | ||
380 | |||
381 | pkg_pn = self.recipecache.pkg_pn | ||
382 | (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn) | ||
383 | |||
384 | logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") | ||
385 | logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") | ||
386 | |||
387 | for p in sorted(pkg_pn): | ||
388 | pref = preferred_versions[p] | ||
389 | latest = latest_versions[p] | ||
390 | |||
391 | prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] | ||
392 | lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] | ||
393 | |||
394 | if pref == latest: | ||
395 | prefstr = "" | ||
396 | |||
397 | logger.plain("%-35s %25s %25s", p, lateststr, prefstr) | ||
398 | |||
399 | def showEnvironment(self, buildfile = None, pkgs_to_build = []): | ||
400 | """ | ||
401 | Show the outer or per-package environment | ||
402 | """ | ||
403 | fn = None | ||
404 | envdata = None | ||
405 | |||
406 | if buildfile: | ||
407 | # Parse the configuration here. We need to do it explicitly here since | ||
408 | # this showEnvironment() code path doesn't use the cache | ||
409 | self.parseConfiguration() | ||
410 | |||
411 | fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile) | ||
412 | fn = self.matchFile(fn) | ||
413 | fn = bb.cache.Cache.realfn2virtual(fn, cls) | ||
414 | elif len(pkgs_to_build) == 1: | ||
415 | ignore = self.data.getVar("ASSUME_PROVIDED", True) or "" | ||
416 | if pkgs_to_build[0] in set(ignore.split()): | ||
417 | bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) | ||
418 | |||
419 | taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort) | ||
420 | |||
421 | targetid = taskdata.getbuild_id(pkgs_to_build[0]) | ||
422 | fnid = taskdata.build_targets[targetid][0] | ||
423 | fn = taskdata.fn_index[fnid] | ||
424 | else: | ||
425 | envdata = self.data | ||
426 | |||
427 | if fn: | ||
428 | try: | ||
429 | envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data) | ||
430 | except Exception as e: | ||
431 | parselog.exception("Unable to read %s", fn) | ||
432 | raise | ||
433 | |||
434 | # Display history | ||
435 | with closing(StringIO()) as env: | ||
436 | self.data.inchistory.emit(env) | ||
437 | logger.plain(env.getvalue()) | ||
438 | |||
439 | # emit variables and shell functions | ||
440 | data.update_data(envdata) | ||
441 | with closing(StringIO()) as env: | ||
442 | data.emit_env(env, envdata, True) | ||
443 | logger.plain(env.getvalue()) | ||
444 | |||
445 | # emit the metadata which isnt valid shell | ||
446 | data.expandKeys(envdata) | ||
447 | for e in envdata.keys(): | ||
448 | if data.getVarFlag( e, 'python', envdata ): | ||
449 | logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1)) | ||
450 | |||
451 | |||
    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build

        Returns (taskdata, runlist, fulltargetlist) where runlist pairs
        each target with "do_<task>".  Fires TreeDataPreparation* events
        so UIs can display progress.
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        # Work on an expanded copy so the main datastore is untouched.
        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
        for k in fulltargetlist:
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            runlist.append([k, "do_%s" % task])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
479 | |||
480 | def prepareTreeData(self, pkgs_to_build, task): | ||
481 | """ | ||
482 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | ||
483 | """ | ||
484 | |||
485 | # We set abort to False here to prevent unbuildable targets raising | ||
486 | # an exception when we're just generating data | ||
487 | taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False) | ||
488 | |||
489 | return runlist, taskdata | ||
490 | |||
491 | ######## WARNING : this function requires cache_extra to be enabled ######## | ||
492 | |||
493 | def generateTaskDepTreeData(self, pkgs_to_build, task): | ||
494 | """ | ||
495 | Create a dependency graph of pkgs_to_build including reverse dependency | ||
496 | information. | ||
497 | """ | ||
498 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) | ||
499 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist) | ||
500 | rq.rqdata.prepare() | ||
501 | return self.buildDependTree(rq, taskdata) | ||
502 | |||
503 | |||
    def buildDependTree(self, rq, taskdata):
        """
        Build a dependency-tree dict from a prepared runqueue.

        Returns a dict with keys: "depends" (pn -> build-time deps),
        "tdepends" (task -> task deps), "pn" (per-recipe metadata),
        "rdepends-pn", "packages", "rdepends-pkg" and "rrecs-pkg".
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        for task in xrange(len(rq.rqdata.runq_fnid)):
            # NOTE(review): taskname is assigned but never used below.
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            # Record task-level edges ("pn.task" -> "deppn.deptask").
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                # First time we see this recipe: record its recipe-level
                # build/runtime dependencies and its packages.
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
580 | |||
581 | ######## WARNING : this function requires cache_extra to be enabled ######## | ||
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
                # NOTE(review): rdepends/rrecs are only (re)bound the first
                # time a pn is seen but are consumed in the fnid block below
                # — confirm a recipe can't reach that block with stale
                # values from a previous iteration.
                rdepends = self.recipecache.rundeps[fn]
                rrecs = self.recipecache.runrecs[fn]
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # for all extra attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                # Map each build dependency to the pn of its selected
                # provider, falling back to the raw item name.
                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                # Same resolution for runtime dependencies.
                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
658 | |||
659 | def generateDepTreeEvent(self, pkgs_to_build, task): | ||
660 | """ | ||
661 | Create a task dependency graph of pkgs_to_build. | ||
662 | Generate an event with the result | ||
663 | """ | ||
664 | depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) | ||
665 | bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data) | ||
666 | |||
667 | def generateDotGraphFiles(self, pkgs_to_build, task): | ||
668 | """ | ||
669 | Create a task dependency graph of pkgs_to_build. | ||
670 | Save the result to a set of .dot files. | ||
671 | """ | ||
672 | |||
673 | depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) | ||
674 | |||
675 | # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn | ||
676 | depends_file = file('pn-depends.dot', 'w' ) | ||
677 | buildlist_file = file('pn-buildlist', 'w' ) | ||
678 | print("digraph depends {", file=depends_file) | ||
679 | for pn in depgraph["pn"]: | ||
680 | fn = depgraph["pn"][pn]["filename"] | ||
681 | version = depgraph["pn"][pn]["version"] | ||
682 | print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) | ||
683 | print("%s" % pn, file=buildlist_file) | ||
684 | buildlist_file.close() | ||
685 | logger.info("PN build list saved to 'pn-buildlist'") | ||
686 | for pn in depgraph["depends"]: | ||
687 | for depend in depgraph["depends"][pn]: | ||
688 | print('"%s" -> "%s"' % (pn, depend), file=depends_file) | ||
689 | for pn in depgraph["rdepends-pn"]: | ||
690 | for rdepend in depgraph["rdepends-pn"][pn]: | ||
691 | print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file) | ||
692 | print("}", file=depends_file) | ||
693 | logger.info("PN dependencies saved to 'pn-depends.dot'") | ||
694 | |||
695 | depends_file = file('package-depends.dot', 'w' ) | ||
696 | print("digraph depends {", file=depends_file) | ||
697 | for package in depgraph["packages"]: | ||
698 | pn = depgraph["packages"][package]["pn"] | ||
699 | fn = depgraph["packages"][package]["filename"] | ||
700 | version = depgraph["packages"][package]["version"] | ||
701 | if package == pn: | ||
702 | print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) | ||
703 | else: | ||
704 | print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file) | ||
705 | for depend in depgraph["depends"][pn]: | ||
706 | print('"%s" -> "%s"' % (package, depend), file=depends_file) | ||
707 | for package in depgraph["rdepends-pkg"]: | ||
708 | for rdepend in depgraph["rdepends-pkg"][package]: | ||
709 | print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) | ||
710 | for package in depgraph["rrecs-pkg"]: | ||
711 | for rdepend in depgraph["rrecs-pkg"][package]: | ||
712 | print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) | ||
713 | print("}", file=depends_file) | ||
714 | logger.info("Package dependencies saved to 'package-depends.dot'") | ||
715 | |||
716 | tdepends_file = file('task-depends.dot', 'w' ) | ||
717 | print("digraph depends {", file=tdepends_file) | ||
718 | for task in depgraph["tdepends"]: | ||
719 | (pn, taskname) = task.rsplit(".", 1) | ||
720 | fn = depgraph["pn"][pn]["filename"] | ||
721 | version = depgraph["pn"][pn]["version"] | ||
722 | print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file) | ||
723 | for dep in depgraph["tdepends"][task]: | ||
724 | print('"%s" -> "%s"' % (task, dep), file=tdepends_file) | ||
725 | print("}", file=tdepends_file) | ||
726 | logger.info("Task dependencies saved to 'task-depends.dot'") | ||
727 | |||
    def show_appends_with_no_recipes( self ):
        """
        Warn (or abort) about .bbappend files whose base recipe does not
        exist, honouring BB_DANGLINGAPPENDS_WARNONLY.
        """
        # Known recipe basenames include skipped recipes so their appends
        # are not reported as dangling.
        recipes = set(os.path.basename(f)
                      for f in self.recipecache.pkg_fn.iterkeys())
        recipes |= set(os.path.basename(f)
                      for f in self.skiplist.iterkeys())
        appended_recipes = self.collection.appendlist.iterkeys()
        appends_without_recipes = [self.collection.appendlist[recipe]
                                   for recipe in appended_recipes
                                   if recipe not in recipes]
        if appends_without_recipes:
            appendlines = (' %s' % append
                           for appends in appends_without_recipes
                           for append in appends)
            msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
            warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                 self.data, False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
                bb.fatal(msg)
748 | |||
749 | def handlePrefProviders(self): | ||
750 | |||
751 | localdata = data.createCopy(self.data) | ||
752 | bb.data.update_data(localdata) | ||
753 | bb.data.expandKeys(localdata) | ||
754 | |||
755 | # Handle PREFERRED_PROVIDERS | ||
756 | for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split(): | ||
757 | try: | ||
758 | (providee, provider) = p.split(':') | ||
759 | except: | ||
760 | providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p) | ||
761 | continue | ||
762 | if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider: | ||
763 | providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee]) | ||
764 | self.recipecache.preferred[providee] = provider | ||
765 | |||
766 | def findCoreBaseFiles(self, subdir, configfile): | ||
767 | corebase = self.data.getVar('COREBASE', True) or "" | ||
768 | paths = [] | ||
769 | for root, dirs, files in os.walk(corebase + '/' + subdir): | ||
770 | for d in dirs: | ||
771 | configfilepath = os.path.join(root, d, configfile) | ||
772 | if os.path.exists(configfilepath): | ||
773 | paths.append(os.path.join(root, d)) | ||
774 | |||
775 | if paths: | ||
776 | bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data) | ||
777 | |||
778 | def findConfigFilePath(self, configfile): | ||
779 | """ | ||
780 | Find the location on disk of configfile and if it exists and was parsed by BitBake | ||
781 | emit the ConfigFilePathFound event with the path to the file. | ||
782 | """ | ||
783 | path = bb.cookerdata.findConfigFile(configfile, self.data) | ||
784 | if not path: | ||
785 | return | ||
786 | |||
787 | # Generate a list of parsed configuration files by searching the files | ||
788 | # listed in the __depends and __base_depends variables with a .conf suffix. | ||
789 | conffiles = [] | ||
790 | dep_files = self.data.getVar('__base_depends') or [] | ||
791 | dep_files = dep_files + (self.data.getVar('__depends') or []) | ||
792 | |||
793 | for f in dep_files: | ||
794 | if f[0].endswith(".conf"): | ||
795 | conffiles.append(f[0]) | ||
796 | |||
797 | _, conf, conffile = path.rpartition("conf/") | ||
798 | match = os.path.join(conf, conffile) | ||
799 | # Try and find matches for conf/conffilename.conf as we don't always | ||
800 | # have the full path to the file. | ||
801 | for cfg in conffiles: | ||
802 | if cfg.endswith(match): | ||
803 | bb.event.fire(bb.event.ConfigFilePathFound(path), | ||
804 | self.data) | ||
805 | break | ||
806 | |||
807 | def findFilesMatchingInDir(self, filepattern, directory): | ||
808 | """ | ||
809 | Searches for files matching the regex 'pattern' which are children of | ||
810 | 'directory' in each BBPATH. i.e. to find all rootfs package classes available | ||
811 | to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes') | ||
812 | or to find all machine configuration files one could call: | ||
813 | findFilesMatchingInDir(self, 'conf/machines', 'conf') | ||
814 | """ | ||
815 | import re | ||
816 | |||
817 | matches = [] | ||
818 | p = re.compile(re.escape(filepattern)) | ||
819 | bbpaths = self.data.getVar('BBPATH', True).split(':') | ||
820 | for path in bbpaths: | ||
821 | dirpath = os.path.join(path, directory) | ||
822 | if os.path.exists(dirpath): | ||
823 | for root, dirs, files in os.walk(dirpath): | ||
824 | for f in files: | ||
825 | if p.search(f): | ||
826 | matches.append(f) | ||
827 | |||
828 | if matches: | ||
829 | bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) | ||
830 | |||
831 | def findConfigFiles(self, varname): | ||
832 | """ | ||
833 | Find config files which are appropriate values for varname. | ||
834 | i.e. MACHINE, DISTRO | ||
835 | """ | ||
836 | possible = [] | ||
837 | var = varname.lower() | ||
838 | |||
839 | data = self.data | ||
840 | # iterate configs | ||
841 | bbpaths = data.getVar('BBPATH', True).split(':') | ||
842 | for path in bbpaths: | ||
843 | confpath = os.path.join(path, "conf", var) | ||
844 | if os.path.exists(confpath): | ||
845 | for root, dirs, files in os.walk(confpath): | ||
846 | # get all child files, these are appropriate values | ||
847 | for f in files: | ||
848 | val, sep, end = f.rpartition('.') | ||
849 | if end == 'conf': | ||
850 | possible.append(val) | ||
851 | |||
852 | if possible: | ||
853 | bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data) | ||
854 | |||
855 | def findInheritsClass(self, klass): | ||
856 | """ | ||
857 | Find all recipes which inherit the specified class | ||
858 | """ | ||
859 | pkg_list = [] | ||
860 | |||
861 | for pfn in self.recipecache.pkg_fn: | ||
862 | inherits = self.recipecache.inherits.get(pfn, None) | ||
863 | if inherits and inherits.count(klass) > 0: | ||
864 | pkg_list.append(self.recipecache.pkg_fn[pfn]) | ||
865 | |||
866 | return pkg_list | ||
867 | |||
868 | def generateTargetsTree(self, klass=None, pkgs=[]): | ||
869 | """ | ||
870 | Generate a dependency tree of buildable targets | ||
871 | Generate an event with the result | ||
872 | """ | ||
873 | # if the caller hasn't specified a pkgs list default to universe | ||
874 | if not len(pkgs): | ||
875 | pkgs = ['universe'] | ||
876 | # if inherited_class passed ensure all recipes which inherit the | ||
877 | # specified class are included in pkgs | ||
878 | if klass: | ||
879 | extra_pkgs = self.findInheritsClass(klass) | ||
880 | pkgs = pkgs + extra_pkgs | ||
881 | |||
882 | # generate a dependency tree for all our packages | ||
883 | tree = self.generatePkgDepTreeData(pkgs, 'build') | ||
884 | bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data) | ||
885 | |||
    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"

        Populates self.recipecache.world_target with the PN of every
        candidate recipe from possible_world that is not excluded below.
        """
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.recipecache.possible_world:
            terminal = True
            pn = self.recipecache.pkg_fn[f]

            for p in self.recipecache.pn_provides[pn]:
                # Recipes providing virtual/* targets are excluded from world.
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                # Recipes sharing a provide with a different recipe are also
                # excluded.  NOTE(review): this inner 'break' only exits the
                # providers loop; the outer loop keeps scanning further
                # provides even once terminal is already False.
                for pf in self.recipecache.providers[p]:
                    if self.recipecache.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
            if terminal:
                self.recipecache.world_target.add(pn)
907 | |||
908 | def interactiveMode( self ): | ||
909 | """Drop off into a shell""" | ||
910 | try: | ||
911 | from bb import shell | ||
912 | except ImportError: | ||
913 | parselog.exception("Interactive mode not available") | ||
914 | sys.exit(1) | ||
915 | else: | ||
916 | shell.start( self ) | ||
917 | |||
918 | |||
    def handleCollections( self, collections ):
        """Handle collections

        Parses the layer (collection) configuration: explicit
        BBFILE_PRIORITY_* values, LAYERDEPENDS_* dependency/version checks,
        and BBFILE_PATTERN_* regexes, then fills
        self.recipecache.bbfile_config_priorities with
        (collection, pattern, compiled_regex, priority) tuples.
        Raises CollectionError after logging if any entry was invalid.
        """
        errors = False
        self.recipecache.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            min_prio = 0
            for c in collection_list:
                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    # NOTE(review): if int() failed above, 'prio' here is
                    # whatever an earlier iteration left (or unbound on the
                    # first iteration, giving a NameError) — confirm intent.
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # No explicit priority: computed later from dependencies.
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
                if deps:
                    depnamelist = []
                    deplist = deps.split()
                    for dep in deplist:
                        # Each dependency may carry a version: "name:version".
                        depsplit = dep.split(':')
                        if len(depsplit) > 1:
                            try:
                                depver = int(depsplit[1])
                            except ValueError:
                                parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                                errors = True
                                continue
                        else:
                            depver = None
                        dep = depsplit[0]
                        depnamelist.append(dep)

                        if dep in collection_list:
                            if depver:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                                if layerver:
                                    try:
                                        lver = int(layerver)
                                    except ValueError:
                                        parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                        errors = True
                                        continue
                                    if lver != depver:
                                        parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = depnamelist
                else:
                    collection_depends[c] = []

            # Recursively work out collection priorities based on dependencies:
            # a layer without an explicit priority gets one more than the
            # highest priority among its dependencies (at least min_prio + 1).
            def calc_layer_priority(collection):
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    errors = True
                    continue
                self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1016 | |||
1017 | def buildSetVars(self): | ||
1018 | """ | ||
1019 | Setup any variables needed before starting a build | ||
1020 | """ | ||
1021 | if not self.data.getVar("BUILDNAME"): | ||
1022 | self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M')) | ||
1023 | self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime())) | ||
1024 | |||
1025 | def matchFiles(self, bf): | ||
1026 | """ | ||
1027 | Find the .bb files which match the expression in 'buildfile'. | ||
1028 | """ | ||
1029 | if bf.startswith("/") or bf.startswith("../"): | ||
1030 | bf = os.path.abspath(bf) | ||
1031 | |||
1032 | self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities) | ||
1033 | filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data) | ||
1034 | try: | ||
1035 | os.stat(bf) | ||
1036 | bf = os.path.abspath(bf) | ||
1037 | return [bf] | ||
1038 | except OSError: | ||
1039 | regexp = re.compile(bf) | ||
1040 | matches = [] | ||
1041 | for f in filelist: | ||
1042 | if regexp.search(f) and os.path.isfile(f): | ||
1043 | matches.append(f) | ||
1044 | return matches | ||
1045 | |||
1046 | def matchFile(self, buildfile): | ||
1047 | """ | ||
1048 | Find the .bb file which matches the expression in 'buildfile'. | ||
1049 | Raise an error if multiple files | ||
1050 | """ | ||
1051 | matches = self.matchFiles(buildfile) | ||
1052 | if len(matches) != 1: | ||
1053 | if matches: | ||
1054 | msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches)) | ||
1055 | if matches: | ||
1056 | for f in matches: | ||
1057 | msg += "\n %s" % f | ||
1058 | parselog.error(msg) | ||
1059 | else: | ||
1060 | parselog.error("Unable to find any recipe file matching '%s'" % buildfile) | ||
1061 | raise NoSpecificMatch | ||
1062 | return matches[0] | ||
1063 | |||
    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile

        Parses configuration plus the single matched recipe (bypassing the
        recipe cache), strips the recipe's external dependencies, then
        registers an idle callback that drives the resulting runqueue.
        'task' defaults to the configured default command when None.
        """

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        # Parse just this one recipe (and its appends) into a fresh cache.
        self.recipecache = bb.cache.CacheData(self.caches_array)
        infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
                    self.data,
                    self.caches_array)
        infos = dict(infos)

        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecache.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecache.ignored_dependencies = set()
        self.recipecache.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.recipecache.task_deps[fn]['depends'] = {}
        self.recipecache.deps[fn] = []
        self.recipecache.rundeps[fn] = []
        self.recipecache.runrecs[fn] = []

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        # Idle callback: steps the runqueue each time the server is idle.
        # Returns True to be called again, False when the build is finished,
        # or a numeric retval passed through from execute_runqueue().
        def buildFileIdle(server, rq, abort):

            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1156 | |||
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified

        Expands the target list into taskdata/runqueue form, fires
        BuildStarted, and registers an idle callback that drives the
        runqueue to completion.
        """

        # Idle callback: steps the runqueue each time the server is idle.
        # Returns True to be called again, False when the build is finished,
        # or a numeric retval passed through from execute_runqueue().
        def buildTargetsIdle(server, rq, abort):
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand()
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return retval

        self.buildSetVars()

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        # 'buildname' is closed over by buildTargetsIdle above.
        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
            # universe builds legitimately hit multiple providers; don't error.
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1197 | |||
1198 | |||
1199 | def getAllKeysWithFlags(self, flaglist): | ||
1200 | dump = {} | ||
1201 | for k in self.data.keys(): | ||
1202 | try: | ||
1203 | v = self.data.getVar(k, True) | ||
1204 | if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart): | ||
1205 | dump[k] = { 'v' : v } | ||
1206 | for d in flaglist: | ||
1207 | dump[k][d] = self.data.getVarFlag(k, d) | ||
1208 | except Exception as e: | ||
1209 | print(e) | ||
1210 | return dump | ||
1211 | |||
1212 | |||
1213 | def generateNewImage(self, image, base_image, package_queue, timestamp, description): | ||
1214 | ''' | ||
1215 | Create a new image with a "require"/"inherit" base_image statement | ||
1216 | ''' | ||
1217 | if timestamp: | ||
1218 | image_name = os.path.splitext(image)[0] | ||
1219 | timestr = time.strftime("-%Y%m%d-%H%M%S") | ||
1220 | dest = image_name + str(timestr) + ".bb" | ||
1221 | else: | ||
1222 | if not image.endswith(".bb"): | ||
1223 | dest = image + ".bb" | ||
1224 | else: | ||
1225 | dest = image | ||
1226 | |||
1227 | if base_image: | ||
1228 | with open(base_image, 'r') as f: | ||
1229 | require_line = f.readline() | ||
1230 | |||
1231 | with open(dest, "w") as imagefile: | ||
1232 | if base_image is None: | ||
1233 | imagefile.write("inherit image\n") | ||
1234 | else: | ||
1235 | topdir = self.data.getVar("TOPDIR") | ||
1236 | if topdir in base_image: | ||
1237 | base_image = require_line.split()[1] | ||
1238 | imagefile.write("require " + base_image + "\n") | ||
1239 | image_install = "IMAGE_INSTALL = \"" | ||
1240 | for package in package_queue: | ||
1241 | image_install += str(package) + " " | ||
1242 | image_install += "\"\n" | ||
1243 | imagefile.write(image_install) | ||
1244 | |||
1245 | description_var = "DESCRIPTION = \"" + description + "\"\n" | ||
1246 | imagefile.write(description_var) | ||
1247 | |||
1248 | self.state = state.initial | ||
1249 | if timestamp: | ||
1250 | return timestr | ||
1251 | |||
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Incrementally drive recipe parsing.

        First call (state != parsing) performs one-off setup: configuration
        parse, ASSUME_PROVIDED handling, file collection and creation of the
        CookerParser.  Subsequent calls step the parser.  Returns True while
        parsing is still in progress, None once complete (state -> running),
        and returns early with no value when already running.  Raises
        BBHandledException on shutdown or parse errors.
        """
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown):
            # Abort in-flight parsing before propagating the shutdown.
            self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.parseConfiguration ()

            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

            # Preserve the configuration-time dependency list under a new
            # name so recipe parsing can accumulate its own __depends.
            self.data.renameVar("__depends", "__base_depends")

            self.parser = CookerParser(self, filelist, masked)
            self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
            self.state = state.running
            return None

        return True
1289 | |||
1290 | def checkPackages(self, pkgs_to_build): | ||
1291 | |||
1292 | # Return a copy, don't modify the original | ||
1293 | pkgs_to_build = pkgs_to_build[:] | ||
1294 | |||
1295 | if len(pkgs_to_build) == 0: | ||
1296 | raise NothingToBuild | ||
1297 | |||
1298 | if 'world' in pkgs_to_build: | ||
1299 | self.buildWorldTargetList() | ||
1300 | pkgs_to_build.remove('world') | ||
1301 | for t in self.recipecache.world_target: | ||
1302 | pkgs_to_build.append(t) | ||
1303 | |||
1304 | if 'universe' in pkgs_to_build: | ||
1305 | parselog.warn("The \"universe\" target is only intended for testing and may produce errors.") | ||
1306 | parselog.debug(1, "collating packages for \"universe\"") | ||
1307 | pkgs_to_build.remove('universe') | ||
1308 | for t in self.recipecache.universe_target: | ||
1309 | pkgs_to_build.append(t) | ||
1310 | |||
1311 | return pkgs_to_build | ||
1312 | |||
1313 | |||
1314 | |||
1315 | |||
    def pre_serve(self):
        # Start the PR service before serving requests; on configuration
        # errors, notify clients that the cooker is exiting instead of
        # raising.
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.event_data)
        return
1325 | |||
    def post_serve(self):
        # Shut down the PR service and notify clients the cooker is exiting.
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.event_data)
1329 | |||
1330 | def shutdown(self, force = False): | ||
1331 | if force: | ||
1332 | self.state = state.forceshutdown | ||
1333 | else: | ||
1334 | self.state = state.shutdown | ||
1335 | |||
    def finishcommand(self):
        # Reset state once an async command completes.
        self.state = state.initial
1338 | |||
    def initialize(self):
        # Set up the initial configuration datastore.
        self.initConfigurationData()
1341 | |||
    def reset(self):
        # Reload configuration data from scratch.
        self.loadConfigurationData()
1344 | |||
def server_main(cooker, func, *args):
    """
    Run func(*args) between cooker.pre_serve() and cooker.post_serve(),
    optionally under a profiler when cooker.configuration.profile is set.
    Returns func's return value.
    """
    cooker.pre_serve()

    if cooker.configuration.profile:
        # Prefer the C profiler and fall back to the pure-Python one.
        # Catch ImportError specifically rather than using a bare except,
        # which would also hide SystemExit/KeyboardInterrupt.
        try:
            import cProfile as profile
        except ImportError:
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")
        bb.utils.process_profilelog("profile.log")
        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

    else:
        ret = func(*args)

    cooker.post_serve()

    return ret
1367 | |||
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        # Plain event; no payload beyond the base class.
        super(CookerExit, self).__init__()
1375 | |||
1376 | |||
class CookerCollectFiles(object):
    """
    Collects the .bb and .bbappend files BitBake should consider, applying
    BBFILES/BBMASK and the per-layer BBFILE_PATTERN/priority configuration.
    """
    def __init__(self, priorities):
        # Map of recipe basename -> list of .bbappend files applying to it.
        self.appendlist = {}
        # List of (collection, pattern, compiled regex, priority) tuples.
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority( self, filename, matched = None ):
        # Return the priority of the first BBFILE_PATTERN that matches
        # filename (0 when none matches).  When 'matched' is a set, record
        # each matching regex so unused patterns can be warned about later.
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched != None:
                    if not regex in matched:
                        matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            # Prune version-control metadata directories from the walk.
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """Collect all available .bb build files

        Returns (bbfiles, masked): the list of recipe files in priority
        order and the count of files skipped due to BBMASK.  Also populates
        self.appendlist and self.overlayed as side effects.
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES", True) or "").split()
        # Write the normalised (whitespace-collapsed) value back.
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        if not len(files):
            # BBFILES empty: fall back to .bb files in the current directory.
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                # Directories are searched recursively.
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                # Non-directories are treated as glob patterns; a literal
                # existing path that globs to nothing is kept as-is.
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = config.getVar('BBMASK', True)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                # An invalid BBMASK disables masking entirely.
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            if not base in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
                self.appendlist[base].append(f)

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                # Same basename seen at higher priority: record as overlayed.
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        f = os.path.basename(fn)
        if f in self.appendlist:
            return self.appendlist[f]
        return []

    def collection_priorities(self, pkgfns):
        # Return a dict mapping each (virtual) recipe filename to its layer
        # priority, warning about BBFILE_PATTERN entries that matched nothing.

        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

        def findmatch(regex):
            # True when the regex matches at least one known .bbappend path.
            for bbfile in self.appendlist:
                for append in self.appendlist[bbfile]:
                    if regex.match(append):
                        return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1533 | |||
class ParsingFailure(Exception):
    """
    Wraps an exception raised while parsing a recipe, recording both the
    underlying exception and the recipe that triggered it.
    """
    def __init__(self, realexception, recipe):
        Exception.__init__(self, realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1539 | |||
class Feeder(multiprocessing.Process):
    """
    Process that feeds parse jobs from an in-memory list into the
    'to_parsers' queue, stopping when told to via the 'quit' queue or when
    the job list is exhausted.
    """
    def __init__(self, jobs, to_parsers, quit):
        self.quit = quit
        self.jobs = jobs
        self.to_parsers = to_parsers
        multiprocessing.Process.__init__(self)

    def run(self):
        while True:
            # Check for a shutdown request before handing out more work.
            # NOTE(review): 'Queue' here is the Python 2 Queue module,
            # presumably imported earlier in this file — confirm.
            try:
                quit = self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                if quit == 'cancel':
                    # Don't block process exit on undelivered queue items.
                    self.to_parsers.cancel_join_thread()
                break

            try:
                job = self.jobs.pop()
            except IndexError:
                # All jobs handed out.
                break

            # Bounded put: if the parsers are busy, put the job back at the
            # front and loop so the quit queue is still polled regularly.
            try:
                self.to_parsers.put(job, timeout=0.5)
            except Queue.Full:
                self.jobs.insert(0, job)
                continue
1568 | |||
class Parser(multiprocessing.Process):
    """
    Worker process that parses recipes pulled from the 'jobs' queue and
    pushes (True, result) tuples onto the 'results' queue, where result is
    either the parsed cache info or an exception describing the failure.
    """
    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the metadata context and event handlers so every parse
        # can be reset to a clean copy (see parse()).
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        # Process entry point; optionally wraps the real work in a profiler
        # when profiling was requested.

        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)
            bb.utils.process_profilelog(logfile)
            print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))

    def realrun(self):
        if self.init:
            self.init()

        pending = []
        while True:
            # Any message on the quit queue means stop immediately.
            try:
                self.quit.get_nowait()
            except Queue.Empty:
                pass
            else:
                self.results.cancel_join_thread()
                break

            # Retry delivering a previously parsed result before taking on
            # new work, so results are not lost when the queue fills up.
            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except Queue.Empty:
                    continue

                if job is None:
                    # Sentinel: no more work.
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except Queue.Full:
                pending.append(result)

    def parse(self, filename, appends, caches_array):
        # Parse a single recipe; always returns a (True, payload) tuple so
        # the consumer can distinguish results from exceptions by type.
        # NOTE(review): self.cfg is not assigned in __init__; it appears to
        # be provided externally before parse() runs — confirm with callers.
        try:
            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
1646 | |||
class CookerParser(object):
    """Drives parallel recipe parsing for the cooker.

    Splits the recipe filelist into cache hits (loaded inline via
    load_cached) and cache misses (parsed by a pool of Parser worker
    processes fed by a Feeder process), then delivers results into the
    cooker's recipe cache one at a time through parse_next().
    """
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        # Worker count is user-overridable; defaults to one per CPU.
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        # Partition the filelist: entries with a valid cache are loaded
        # lazily by load_cached(); the rest go to the worker pool.
        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a ParseProgress event roughly every 1% of files parsed.
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()
        self.haveshutdown = False

    def start(self):
        """Spawn the feeder and parser processes and chain their results."""
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Runs once inside each worker: shares the config as a Parser
            # class attribute and registers finalizers that persist the
            # codeparser/fetcher caches when the worker exits.
            def init():
                Parser.cfg = self.cfgdata
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.processes.append(parser)

        # Cached results are yielded first, then freshly-parsed ones as
        # they arrive from the workers.
        self.results = itertools.chain(self.results, self.parse_generator())

    def shutdown(self, clean=True, force=False):
        """Stop the feeder and parser processes and flush the caches.

        clean: fire ParseCompleted and let workers drain naturally via
        None sentinels; otherwise send 'cancel' to abort the feeder.
        force: give each worker only 0.1s to join, then terminate it.
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One end-of-work sentinel per worker so each sees one.
            for process in self.processes:
                self.jobs.put(None)
        else:
            self.feeder_quit.put('cancel')

        self.parser_quit.cancel_join_thread()
        for process in self.processes:
            self.parser_quit.put(None)

        self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Write the recipe cache out in the background; the Finalize
        # ensures interpreter exit waits for the sync thread.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.data)
        bb.fetch.fetcher_parse_done(self.cooker.data)

    def load_cached(self):
        """Yield (parsed, infos) for every recipe with a valid cache entry."""
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_generator(self):
        """Yield worker results until all files are parsed.

        A worker that hit an exception returns it as the result value;
        it is re-raised here in the parent so parse_next() can report it.
        """
        while True:
            if self.parsed >= self.toparse:
                break

            try:
                result = self.result_queue.get(timeout=0.25)
            except Queue.Empty:
                pass
            else:
                value = result[1]
                if isinstance(value, BaseException):
                    raise value
                else:
                    yield result

    def parse_next(self):
        """Consume one parse result; return False when done or on error.

        Updates the accounting statistics, fires progress events and adds
        the recipe info to the cooker's recipe cache.  Every error path
        shuts the parser pool down uncleanly (no ParseCompleted event).
        """
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # The error was already displayed by whoever raised it.
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                # Worker-side parse() attached .recipe/.traceback to the
                # exception before returning it to us.
                logger.error('Unable to parse %s', value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                   parsed=parsed)
        return True

    def reparse(self, filename):
        """Force a fresh parse of filename and add results to the recipe cache."""
        infos = self.bb_cache.parse(filename,
                                    self.cooker.collection.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info_array in infos:
            self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)