Diffstat (limited to 'bitbake-dev/lib/bb/cooker.py')
-rw-r--r-- | bitbake-dev/lib/bb/cooker.py | 941
1 files changed, 941 insertions, 0 deletions
diff --git a/bitbake-dev/lib/bb/cooker.py b/bitbake-dev/lib/bb/cooker.py
new file mode 100644
index 0000000000..c92ad70a2c
--- /dev/null
+++ b/bitbake-dev/lib/bb/cooker.py
@@ -0,0 +1,941 @@
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | # | ||
5 | # Copyright (C) 2003, 2004 Chris Larson | ||
6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
7 | # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer | ||
8 | # Copyright (C) 2005 Holger Hans Peter Freyther | ||
9 | # Copyright (C) 2005 ROAD GmbH | ||
10 | # Copyright (C) 2006 - 2007 Richard Purdie | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | |||
25 | import sys, os, getopt, glob, copy, os.path, re, time | ||
26 | import bb | ||
27 | from bb import utils, data, parse, event, cache, providers, taskdata, runqueue | ||
28 | from bb import xmlrpcserver, command | ||
29 | from sets import Set | ||
30 | import itertools, sre_constants | ||
31 | |||
32 | class MultipleMatches(Exception): | ||
33 | """ | ||
34 | Exception raised when multiple file matches are found | ||
35 | """ | ||
36 | |||
37 | class ParsingErrorsFound(Exception): | ||
38 | """ | ||
39 | Exception raised when parsing errors are found | ||
40 | """ | ||
41 | |||
42 | class NothingToBuild(Exception): | ||
43 | """ | ||
44 | Exception raised when there is nothing to build | ||
45 | """ | ||
46 | |||
47 | |||
48 | # Different states cooker can be in | ||
49 | cookerClean = 1 | ||
50 | cookerParsed = 2 | ||
51 | |||
52 | # Different action states the cooker can be in | ||
53 | cookerRun = 1 # Cooker is running normally | ||
54 | cookerShutdown = 2 # Active tasks should be brought to a controlled stop | ||
55 | cookerStop = 3 # Stop, now! | ||
56 | |||
57 | #============================================================================# | ||
58 | # BBCooker | ||
59 | #============================================================================# | ||
60 | class BBCooker: | ||
61 | """ | ||
62 | Manages one bitbake build run | ||
63 | """ | ||
64 | |||
65 | def __init__(self, configuration): | ||
66 | self.status = None | ||
67 | |||
68 | self.cache = None | ||
69 | self.bb_cache = None | ||
70 | |||
71 | self.server = bb.xmlrpcserver.BitBakeXMLRPCServer(self) | ||
72 | #self.server.register_function(self.showEnvironment) | ||
73 | |||
74 | self.configuration = configuration | ||
75 | |||
76 | if self.configuration.verbose: | ||
77 | bb.msg.set_verbose(True) | ||
78 | |||
79 | if self.configuration.debug: | ||
80 | bb.msg.set_debug_level(self.configuration.debug) | ||
81 | else: | ||
82 | bb.msg.set_debug_level(0) | ||
83 | |||
84 | if self.configuration.debug_domains: | ||
85 | bb.msg.set_debug_domains(self.configuration.debug_domains) | ||
86 | |||
87 | self.configuration.data = bb.data.init() | ||
88 | |||
89 | for f in self.configuration.file: | ||
90 | self.parseConfigurationFile( f ) | ||
91 | |||
92 | self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) ) | ||
93 | |||
94 | if not self.configuration.cmd: | ||
95 | self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build" | ||
96 | |||
97 | bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True) | ||
98 | if bbpkgs: | ||
99 | self.configuration.pkgs_to_build.extend(bbpkgs.split()) | ||
100 | |||
101 | # | ||
102 | # Special updated configuration we use for firing events | ||
103 | # | ||
104 | self.configuration.event_data = bb.data.createCopy(self.configuration.data) | ||
105 | bb.data.update_data(self.configuration.event_data) | ||
106 | |||
107 | # TOSTOP must not be set or our children will hang when they output | ||
108 | fd = sys.stdout.fileno() | ||
109 | if os.isatty(fd): | ||
110 | import termios | ||
111 | tcattr = termios.tcgetattr(fd) | ||
112 | if tcattr[3] & termios.TOSTOP: | ||
113 | bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...") | ||
114 | tcattr[3] = tcattr[3] & ~termios.TOSTOP | ||
115 | termios.tcsetattr(fd, termios.TCSANOW, tcattr) | ||
116 | |||
117 | # Change nice level if we're asked to | ||
118 | nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True) | ||
119 | if nice: | ||
120 | curnice = os.nice(0) | ||
121 | nice = int(nice) - curnice | ||
122 | bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice)) | ||
123 | |||
124 | # Parse any commandline into actions | ||
125 | if self.configuration.show_environment: | ||
126 | self.commandlineAction = None | ||
127 | |||
128 | if 'world' in self.configuration.pkgs_to_build: | ||
129 | bb.error("'world' is not a valid target for --environment.") | ||
130 | elif len(self.configuration.pkgs_to_build) > 1: | ||
131 | bb.error("Only one target can be used with the --environment option.") | ||
132 | elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0: | ||
133 | bb.error("No target should be used with the --environment and --buildfile options.") | ||
134 | else: | ||
135 | self.commandlineAction = ["showEnvironment", self.configuration.buildfile, self.configuration.pkgs_to_build] | ||
136 | elif self.configuration.buildfile is not None: | ||
137 | self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd] | ||
138 | elif self.configuration.show_versions: | ||
139 | self.commandlineAction = ["showVersions"] | ||
140 | elif self.configuration.parse_only: | ||
141 | self.commandlineAction = ["parseFiles"] | ||
142 | elif self.configuration.dot_graph: | ||
143 | if self.configuration.pkgs_to_build: | ||
144 | self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build] | ||
145 | else: | ||
146 | self.commandlineAction = None | ||
147 | bb.error("Please specify a package name for dependency graph generation.") | ||
148 | else: | ||
149 | if self.configuration.pkgs_to_build: | ||
150 | self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build] | ||
151 | else: | ||
152 | self.commandlineAction = None | ||
153 | bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") | ||
154 | |||
155 | # FIXME - implement | ||
156 | #if self.configuration.interactive: | ||
157 | # self.interactiveMode() | ||
158 | |||
159 | self.command = bb.command.Command(self) | ||
160 | self.cookerIdle = True | ||
161 | self.cookerState = cookerClean | ||
162 | self.cookerAction = cookerRun | ||
163 | self.server.register_idle_function(self.runCommands, self) | ||
164 | |||
165 | |||
166 | def runCommands(self, server, data, abort): | ||
167 | """ | ||
168 | Run any queued offline command | ||
169 | This is done by the idle handler so it runs in true context rather than | ||
170 | tied to any UI. | ||
171 | """ | ||
172 | if self.cookerIdle and not abort: | ||
173 | self.command.runOfflineCommand() | ||
174 | |||
175 | # Always reschedule | ||
176 | return True | ||
177 | |||
178 | def tryBuildPackage(self, fn, item, task, the_data): | ||
179 | """ | ||
180 | Build one task of a package, optionally build following task depends | ||
181 | """ | ||
182 | bb.event.fire(bb.event.PkgStarted(item, the_data)) | ||
183 | try: | ||
184 | if not self.configuration.dry_run: | ||
185 | bb.build.exec_task('do_%s' % task, the_data) | ||
186 | bb.event.fire(bb.event.PkgSucceeded(item, the_data)) | ||
187 | return True | ||
188 | except bb.build.FuncFailed: | ||
189 | bb.msg.error(bb.msg.domain.Build, "task stack execution failed") | ||
190 | bb.event.fire(bb.event.PkgFailed(item, the_data)) | ||
191 | raise | ||
192 | except bb.build.EventException, e: | ||
193 | event = e.args[1] | ||
194 | bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event)) | ||
195 | bb.event.fire(bb.event.PkgFailed(item, the_data)) | ||
196 | raise | ||
197 | |||
198 | def tryBuild(self, fn): | ||
199 | """ | ||
200 | Build a provider and its dependencies. | ||
201 | build_depends is a list of previous build dependencies (not runtime) | ||
202 | If build_depends is empty, we're dealing with a runtime depends | ||
203 | """ | ||
204 | |||
205 | the_data = self.bb_cache.loadDataFull(fn, self.configuration.data) | ||
206 | |||
207 | item = self.status.pkg_fn[fn] | ||
208 | |||
209 | #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data): | ||
210 | # return True | ||
211 | |||
212 | return self.tryBuildPackage(fn, item, self.configuration.cmd, the_data) | ||
213 | |||
214 | def showVersions(self): | ||
215 | |||
216 | # Need files parsed | ||
217 | self.updateCache() | ||
218 | |||
219 | pkg_pn = self.status.pkg_pn | ||
220 | preferred_versions = {} | ||
221 | latest_versions = {} | ||
222 | |||
223 | # Sort by priority | ||
224 | for pn in pkg_pn.keys(): | ||
225 | (last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status) | ||
226 | preferred_versions[pn] = (pref_ver, pref_file) | ||
227 | latest_versions[pn] = (last_ver, last_file) | ||
228 | |||
229 | pkg_list = pkg_pn.keys() | ||
230 | pkg_list.sort() | ||
231 | |||
232 | bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version")) | ||
233 | bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "=================")) | ||
234 | |||
235 | for p in pkg_list: | ||
236 | pref = preferred_versions[p] | ||
237 | latest = latest_versions[p] | ||
238 | |||
239 | prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] | ||
240 | lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] | ||
241 | |||
242 | if pref == latest: | ||
243 | prefstr = "" | ||
244 | |||
245 | bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr)) | ||
246 | |||
247 | def showEnvironment(self, buildfile = None, pkgs_to_build = []): | ||
248 | """ | ||
249 | Show the outer or per-package environment | ||
250 | """ | ||
251 | fn = None | ||
252 | envdata = None | ||
253 | |||
254 | if buildfile: | ||
255 | self.cb = None | ||
256 | self.bb_cache = bb.cache.init(self) | ||
257 | fn = self.matchFile(buildfile) | ||
258 | elif len(pkgs_to_build) == 1: | ||
259 | self.updateCache() | ||
260 | |||
261 | localdata = data.createCopy(self.configuration.data) | ||
262 | bb.data.update_data(localdata) | ||
263 | bb.data.expandKeys(localdata) | ||
264 | |||
265 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
266 | taskdata.add_provider(localdata, self.status, pkgs_to_build[0]) | ||
267 | taskdata.add_unresolved(localdata, self.status) | ||
268 | |||
269 | targetid = taskdata.getbuild_id(pkgs_to_build[0]) | ||
270 | fnid = taskdata.build_targets[targetid][0] | ||
271 | fn = taskdata.fn_index[fnid] | ||
272 | else: | ||
273 | envdata = self.configuration.data | ||
274 | |||
275 | if fn: | ||
276 | try: | ||
277 | envdata = self.bb_cache.loadDataFull(fn, self.configuration.data) | ||
278 | except IOError, e: | ||
279 | bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e)) | ||
280 | raise | ||
281 | except Exception, e: | ||
282 | bb.msg.error(bb.msg.domain.Parsing, "%s" % e) | ||
283 | raise | ||
284 | |||
285 | class dummywrite: | ||
286 | def __init__(self): | ||
287 | self.writebuf = "" | ||
288 | def write(self, output): | ||
289 | self.writebuf = self.writebuf + output | ||
290 | |||
291 | # emit variables and shell functions | ||
292 | try: | ||
293 | data.update_data(envdata) | ||
294 | wb = dummywrite() | ||
295 | data.emit_env(wb, envdata, True) | ||
296 | bb.msg.plain(wb.writebuf) | ||
297 | except Exception, e: | ||
298 | bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) | ||
299 | # emit the metadata which isn't valid shell | ||
300 | data.expandKeys(envdata) | ||
301 | for e in envdata.keys(): | ||
302 | if data.getVarFlag( e, 'python', envdata ): | ||
303 | bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1))) | ||
304 | |||
305 | def generateDepTreeData(self, pkgs_to_build): | ||
306 | """ | ||
307 | Create a dependency tree of pkgs_to_build, returning the data. | ||
308 | """ | ||
309 | |||
310 | # Need files parsed | ||
311 | self.updateCache() | ||
312 | |||
313 | pkgs_to_build = self.checkPackages(pkgs_to_build) | ||
314 | |||
315 | localdata = data.createCopy(self.configuration.data) | ||
316 | bb.data.update_data(localdata) | ||
317 | bb.data.expandKeys(localdata) | ||
318 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
319 | |||
320 | runlist = [] | ||
321 | for k in pkgs_to_build: | ||
322 | taskdata.add_provider(localdata, self.status, k) | ||
323 | runlist.append([k, "do_%s" % self.configuration.cmd]) | ||
324 | taskdata.add_unresolved(localdata, self.status) | ||
325 | |||
326 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
327 | rq.prepare_runqueue() | ||
328 | |||
329 | seen_fnids = [] | ||
330 | depend_tree = {} | ||
331 | depend_tree["depends"] = {} | ||
332 | depend_tree["tdepends"] = {} | ||
333 | depend_tree["pn"] = {} | ||
334 | depend_tree["rdepends-pn"] = {} | ||
335 | depend_tree["packages"] = {} | ||
336 | depend_tree["rdepends-pkg"] = {} | ||
337 | depend_tree["rrecs-pkg"] = {} | ||
338 | |||
339 | for task in range(len(rq.runq_fnid)): | ||
340 | taskname = rq.runq_task[task] | ||
341 | fnid = rq.runq_fnid[task] | ||
342 | fn = taskdata.fn_index[fnid] | ||
343 | pn = self.status.pkg_fn[fn] | ||
344 | version = "%s:%s-%s" % self.status.pkg_pepvpr[fn] | ||
345 | if pn not in depend_tree["pn"]: | ||
346 | depend_tree["pn"][pn] = {} | ||
347 | depend_tree["pn"][pn]["filename"] = fn | ||
348 | depend_tree["pn"][pn]["version"] = version | ||
349 | for dep in rq.runq_depends[task]: | ||
350 | depfn = taskdata.fn_index[rq.runq_fnid[dep]] | ||
351 | deppn = self.status.pkg_fn[depfn] | ||
352 | dotname = "%s.%s" % (pn, rq.runq_task[task]) | ||
353 | if not dotname in depend_tree["tdepends"]: | ||
354 | depend_tree["tdepends"][dotname] = [] | ||
355 | depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.runq_task[dep])) | ||
356 | if fnid not in seen_fnids: | ||
357 | seen_fnids.append(fnid) | ||
358 | packages = [] | ||
359 | |||
360 | depend_tree["depends"][pn] = [] | ||
361 | for dep in taskdata.depids[fnid]: | ||
362 | depend_tree["depends"][pn].append(taskdata.build_names_index[dep]) | ||
363 | |||
364 | depend_tree["rdepends-pn"][pn] = [] | ||
365 | for rdep in taskdata.rdepids[fnid]: | ||
366 | depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep]) | ||
367 | |||
368 | rdepends = self.status.rundeps[fn] | ||
369 | for package in rdepends: | ||
370 | depend_tree["rdepends-pkg"][package] = [] | ||
371 | for rdepend in rdepends[package]: | ||
372 | depend_tree["rdepends-pkg"][package].append(rdepend) | ||
373 | packages.append(package) | ||
374 | |||
375 | rrecs = self.status.runrecs[fn] | ||
376 | for package in rrecs: | ||
377 | depend_tree["rrecs-pkg"][package] = [] | ||
378 | for rdepend in rrecs[package]: | ||
379 | depend_tree["rrecs-pkg"][package].append(rdepend) | ||
380 | if not package in packages: | ||
381 | packages.append(package) | ||
382 | |||
383 | for package in packages: | ||
384 | if package not in depend_tree["packages"]: | ||
385 | depend_tree["packages"][package] = {} | ||
386 | depend_tree["packages"][package]["pn"] = pn | ||
387 | depend_tree["packages"][package]["filename"] = fn | ||
388 | depend_tree["packages"][package]["version"] = version | ||
389 | |||
390 | return depend_tree | ||
391 | |||
392 | |||
393 | def generateDepTreeEvent(self, pkgs_to_build): | ||
394 | """ | ||
395 | Create a task dependency graph of pkgs_to_build. | ||
396 | Generate an event with the result | ||
397 | """ | ||
398 | depgraph = self.generateDepTreeData(pkgs_to_build) | ||
399 | bb.event.fire(bb.event.DepTreeGenerated(self.configuration.data, depgraph)) | ||
400 | |||
401 | def generateDotGraphFiles(self, pkgs_to_build): | ||
402 | """ | ||
403 | Create a task dependency graph of pkgs_to_build. | ||
404 | Save the result to a set of .dot files. | ||
405 | """ | ||
406 | |||
407 | depgraph = self.generateDepTreeData(pkgs_to_build) | ||
408 | |||
409 | # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn | ||
410 | depends_file = file('pn-depends.dot', 'w' ) | ||
411 | print >> depends_file, "digraph depends {" | ||
412 | for pn in depgraph["pn"]: | ||
413 | fn = depgraph["pn"][pn]["filename"] | ||
414 | version = depgraph["pn"][pn]["version"] | ||
415 | print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) | ||
416 | for pn in depgraph["depends"]: | ||
417 | for depend in depgraph["depends"][pn]: | ||
418 | print >> depends_file, '"%s" -> "%s"' % (pn, depend) | ||
419 | for pn in depgraph["rdepends-pn"]: | ||
420 | for rdepend in depgraph["rdepends-pn"][pn]: | ||
421 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend) | ||
422 | print >> depends_file, "}" | ||
423 | bb.msg.plain("PN dependencies saved to 'pn-depends.dot'") | ||
424 | |||
425 | depends_file = file('package-depends.dot', 'w' ) | ||
426 | print >> depends_file, "digraph depends {" | ||
427 | for package in depgraph["packages"]: | ||
428 | pn = depgraph["packages"][package]["pn"] | ||
429 | fn = depgraph["packages"][package]["filename"] | ||
430 | version = depgraph["packages"][package]["version"] | ||
431 | if package == pn: | ||
432 | print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) | ||
433 | else: | ||
434 | print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn) | ||
435 | for depend in depgraph["depends"][pn]: | ||
436 | print >> depends_file, '"%s" -> "%s"' % (package, depend) | ||
437 | for package in depgraph["rdepends-pkg"]: | ||
438 | for rdepend in depgraph["rdepends-pkg"][package]: | ||
439 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) | ||
440 | for package in depgraph["rrecs-pkg"]: | ||
441 | for rdepend in depgraph["rrecs-pkg"][package]: | ||
442 | print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) | ||
443 | print >> depends_file, "}" | ||
444 | bb.msg.plain("Package dependencies saved to 'package-depends.dot'") | ||
445 | |||
446 | tdepends_file = file('task-depends.dot', 'w' ) | ||
447 | print >> tdepends_file, "digraph depends {" | ||
448 | for task in depgraph["tdepends"]: | ||
449 | (pn, taskname) = task.rsplit(".", 1) | ||
450 | fn = depgraph["pn"][pn]["filename"] | ||
451 | version = depgraph["pn"][pn]["version"] | ||
452 | print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn) | ||
453 | for dep in depgraph["tdepends"][task]: | ||
454 | print >> tdepends_file, '"%s" -> "%s"' % (task, dep) | ||
455 | print >> tdepends_file, "}" | ||
456 | bb.msg.plain("Task dependencies saved to 'task-depends.dot'") | ||
457 | |||
458 | def buildDepgraph( self ): | ||
459 | all_depends = self.status.all_depends | ||
460 | pn_provides = self.status.pn_provides | ||
461 | |||
462 | localdata = data.createCopy(self.configuration.data) | ||
463 | bb.data.update_data(localdata) | ||
464 | bb.data.expandKeys(localdata) | ||
465 | |||
466 | def calc_bbfile_priority(filename): | ||
467 | for (regex, pri) in self.status.bbfile_config_priorities: | ||
468 | if regex.match(filename): | ||
469 | return pri | ||
470 | return 0 | ||
471 | |||
472 | # Handle PREFERRED_PROVIDERS | ||
473 | for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split(): | ||
474 | try: | ||
475 | (providee, provider) = p.split(':') | ||
476 | except: | ||
477 | bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p) | ||
478 | continue | ||
479 | if providee in self.status.preferred and self.status.preferred[providee] != provider: | ||
480 | bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee])) | ||
481 | self.status.preferred[providee] = provider | ||
482 | |||
483 | # Calculate priorities for each file | ||
484 | for p in self.status.pkg_fn.keys(): | ||
485 | self.status.bbfile_priority[p] = calc_bbfile_priority(p) | ||
486 | |||
487 | def buildWorldTargetList(self): | ||
488 | """ | ||
489 | Build package list for "bitbake world" | ||
490 | """ | ||
491 | all_depends = self.status.all_depends | ||
492 | pn_provides = self.status.pn_provides | ||
493 | bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"") | ||
494 | for f in self.status.possible_world: | ||
495 | terminal = True | ||
496 | pn = self.status.pkg_fn[f] | ||
497 | |||
498 | for p in pn_provides[pn]: | ||
499 | if p.startswith('virtual/'): | ||
500 | bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p)) | ||
501 | terminal = False | ||
502 | break | ||
503 | for pf in self.status.providers[p]: | ||
504 | if self.status.pkg_fn[pf] != pn: | ||
505 | bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p)) | ||
506 | terminal = False | ||
507 | break | ||
508 | if terminal: | ||
509 | self.status.world_target.add(pn) | ||
510 | |||
511 | # drop reference count now | ||
512 | self.status.possible_world = None | ||
513 | self.status.all_depends = None | ||
514 | |||
515 | def interactiveMode( self ): | ||
516 | """Drop off into a shell""" | ||
517 | try: | ||
518 | from bb import shell | ||
519 | except ImportError, details: | ||
520 | bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details ) | ||
521 | else: | ||
522 | shell.start( self ) | ||
523 | |||
524 | def parseConfigurationFile( self, afile ): | ||
525 | try: | ||
526 | self.configuration.data = bb.parse.handle( afile, self.configuration.data ) | ||
527 | |||
528 | # Handle any INHERITs and inherit the base class | ||
529 | inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split() | ||
530 | for inherit in inherits: | ||
531 | self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True ) | ||
532 | |||
533 | # Normally we only register event handlers at the end of parsing .bb files | ||
534 | # We register any handlers we've found so far here... | ||
535 | for var in data.getVar('__BBHANDLERS', self.configuration.data) or []: | ||
536 | bb.event.register(var,bb.data.getVar(var, self.configuration.data)) | ||
537 | |||
538 | bb.fetch.fetcher_init(self.configuration.data) | ||
539 | |||
540 | bb.event.fire(bb.event.ConfigParsed(self.configuration.data)) | ||
541 | |||
542 | except IOError, e: | ||
543 | bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (afile, str(e))) | ||
544 | except IOError: | ||
545 | bb.msg.fatal(bb.msg.domain.Parsing, "Unable to open %s" % afile ) | ||
546 | except bb.parse.ParseError, details: | ||
547 | bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) ) | ||
548 | |||
549 | def handleCollections( self, collections ): | ||
550 | """Handle collections""" | ||
551 | if collections: | ||
552 | collection_list = collections.split() | ||
553 | for c in collection_list: | ||
554 | regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1) | ||
555 | if regex == None: | ||
556 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c) | ||
557 | continue | ||
558 | priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1) | ||
559 | if priority == None: | ||
560 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c) | ||
561 | continue | ||
562 | try: | ||
563 | cre = re.compile(regex) | ||
564 | except re.error: | ||
565 | bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex)) | ||
566 | continue | ||
567 | try: | ||
568 | pri = int(priority) | ||
569 | self.status.bbfile_config_priorities.append((cre, pri)) | ||
570 | except ValueError: | ||
571 | bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority)) | ||
572 | |||
573 | def buildSetVars(self): | ||
574 | """ | ||
575 | Setup any variables needed before starting a build | ||
576 | """ | ||
577 | if not bb.data.getVar("BUILDNAME", self.configuration.data): | ||
578 | bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data) | ||
579 | bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data) | ||
580 | |||
581 | def matchFiles(self, buildfile): | ||
582 | """ | ||
583 | Find the .bb files which match the expression in 'buildfile'. | ||
584 | """ | ||
585 | |||
586 | bf = os.path.abspath(buildfile) | ||
587 | try: | ||
588 | os.stat(bf) | ||
589 | return [bf] | ||
590 | except OSError: | ||
591 | (filelist, masked) = self.collect_bbfiles() | ||
592 | regexp = re.compile(buildfile) | ||
593 | matches = [] | ||
594 | for f in filelist: | ||
595 | if regexp.search(f) and os.path.isfile(f): | ||
596 | bf = f | ||
597 | matches.append(f) | ||
598 | return matches | ||
599 | |||
600 | def matchFile(self, buildfile): | ||
601 | """ | ||
602 | Find the .bb file which matches the expression in 'buildfile'. | ||
603 | Raise an error if multiple files match | ||
604 | """ | ||
605 | matches = self.matchFiles(buildfile) | ||
606 | if len(matches) != 1: | ||
607 | bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches))) | ||
608 | for f in matches: | ||
609 | bb.msg.error(bb.msg.domain.Parsing, " %s" % f) | ||
610 | raise MultipleMatches | ||
611 | return matches[0] | ||
612 | |||
613 | def buildFile(self, buildfile, task): | ||
614 | """ | ||
615 | Build the file matching regexp buildfile | ||
616 | """ | ||
617 | |||
618 | fn = self.matchFile(buildfile) | ||
619 | self.buildSetVars() | ||
620 | |||
621 | # Load data into the cache for fn | ||
622 | self.bb_cache = bb.cache.init(self) | ||
623 | self.bb_cache.loadData(fn, self.configuration.data) | ||
624 | |||
625 | # Parse the loaded cache data | ||
626 | self.status = bb.cache.CacheData() | ||
627 | self.bb_cache.handle_data(fn, self.status) | ||
628 | |||
629 | # Tweak some variables | ||
630 | item = self.bb_cache.getVar('PN', fn, True) | ||
631 | self.status.ignored_dependencies = Set() | ||
632 | self.status.bbfile_priority[fn] = 1 | ||
633 | |||
634 | # Remove external dependencies | ||
635 | self.status.task_deps[fn]['depends'] = {} | ||
636 | self.status.deps[fn] = [] | ||
637 | self.status.rundeps[fn] = [] | ||
638 | self.status.runrecs[fn] = [] | ||
639 | |||
640 | # Remove stamp for target if force mode active | ||
641 | if self.configuration.force: | ||
642 | bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn)) | ||
643 | bb.build.del_stamp('do_%s' % task, self.status, fn) | ||
644 | |||
645 | # Setup taskdata structure | ||
646 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
647 | taskdata.add_provider(self.configuration.data, self.status, item) | ||
648 | |||
649 | buildname = bb.data.getVar("BUILDNAME", self.configuration.data) | ||
650 | bb.event.fire(bb.event.BuildStarted(buildname, [item], self.configuration.event_data)) | ||
651 | |||
652 | # Execute the runqueue | ||
653 | runlist = [[item, "do_%s" % self.configuration.cmd]] | ||
654 | |||
655 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
656 | |||
657 | def buildFileIdle(server, rq, abort): | ||
658 | |||
659 | if abort or self.cookerAction == cookerStop: | ||
660 | rq.finish_runqueue(True) | ||
661 | elif self.cookerAction == cookerShutdown: | ||
662 | rq.finish_runqueue(False) | ||
663 | failures = 0 | ||
664 | try: | ||
665 | retval = rq.execute_runqueue() | ||
666 | except runqueue.TaskFailure, fnids: | ||
667 | for fnid in fnids: | ||
668 | bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) | ||
669 | failures = failures + 1 | ||
670 | retval = False | ||
671 | if not retval: | ||
672 | self.cookerIdle = True | ||
673 | self.command.finishOfflineCommand() | ||
674 | bb.event.fire(bb.event.BuildCompleted(buildname, [item], self.configuration.event_data, failures)) | ||
675 | return retval | ||
676 | |||
677 | self.cookerIdle = False | ||
678 | self.server.register_idle_function(buildFileIdle, rq) | ||
679 | |||
680 | def buildTargets(self, targets): | ||
681 | """ | ||
682 | Attempt to build the targets specified | ||
683 | """ | ||
684 | |||
685 | # Need files parsed | ||
686 | self.updateCache() | ||
687 | |||
688 | targets = self.checkPackages(targets) | ||
689 | |||
690 | def buildTargetsIdle(server, rq, abort): | ||
691 | |||
692 | if abort or self.cookerAction == cookerStop: | ||
693 | rq.finish_runqueue(True) | ||
694 | elif self.cookerAction == cookerShutdown: | ||
695 | rq.finish_runqueue(False) | ||
696 | failures = 0 | ||
697 | try: | ||
698 | retval = rq.execute_runqueue() | ||
699 | except runqueue.TaskFailure, fnids: | ||
700 | for fnid in fnids: | ||
701 | bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) | ||
702 | failures = failures + 1 | ||
703 | retval = False | ||
704 | if not retval: | ||
705 | self.cookerIdle = True | ||
706 | self.command.finishOfflineCommand() | ||
707 | bb.event.fire(bb.event.BuildCompleted(buildname, targets, self.configuration.event_data, failures)) | ||
708 | return retval | ||
709 | |||
710 | self.buildSetVars() | ||
711 | |||
712 | buildname = bb.data.getVar("BUILDNAME", self.configuration.data) | ||
713 | bb.event.fire(bb.event.BuildStarted(buildname, targets, self.configuration.event_data)) | ||
714 | |||
715 | localdata = data.createCopy(self.configuration.data) | ||
716 | bb.data.update_data(localdata) | ||
717 | bb.data.expandKeys(localdata) | ||
718 | |||
719 | taskdata = bb.taskdata.TaskData(self.configuration.abort) | ||
720 | |||
721 | runlist = [] | ||
722 | for k in targets: | ||
723 | taskdata.add_provider(localdata, self.status, k) | ||
724 | runlist.append([k, "do_%s" % self.configuration.cmd]) | ||
725 | taskdata.add_unresolved(localdata, self.status) | ||
726 | |||
727 | rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) | ||
728 | |||
729 | self.cookerIdle = False | ||
730 | self.server.register_idle_function(buildTargetsIdle, rq) | ||
731 | |||
732 | def updateCache(self): | ||
733 | |||
734 | if self.cookerState == cookerParsed: | ||
735 | return | ||
736 | |||
737 | # Import Psyco if available and not disabled | ||
738 | import platform | ||
739 | if platform.machine() in ['i386', 'i486', 'i586', 'i686']: | ||
740 | if not self.configuration.disable_psyco: | ||
741 | try: | ||
742 | import psyco | ||
743 | except ImportError: | ||
744 | bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.") | ||
745 | else: | ||
746 | psyco.bind( self.parse_bbfiles ) | ||
747 | else: | ||
748 | bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.") | ||
749 | |||
750 | self.status = bb.cache.CacheData() | ||
751 | |||
752 | ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" | ||
753 | self.status.ignored_dependencies = Set(ignore.split()) | ||
754 | |||
755 | for dep in self.configuration.extra_assume_provided: | ||
756 | self.status.ignored_dependencies.add(dep) | ||
757 | |||
758 | self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) ) | ||
759 | |||
760 | bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files") | ||
761 | (filelist, masked) = self.collect_bbfiles() | ||
762 | self.parse_bbfiles(filelist, masked) | ||
763 | bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete") | ||
764 | |||
765 | self.buildDepgraph() | ||
766 | |||
767 | self.cookerState = cookerParsed | ||
768 | |||
769 | def checkPackages(self, pkgs_to_build): | ||
770 | |||
771 | if len(pkgs_to_build) == 0: | ||
772 | raise NothingToBuild | ||
773 | |||
774 | if 'world' in pkgs_to_build: | ||
775 | self.buildWorldTargetList() | ||
776 | pkgs_to_build.remove('world') | ||
777 | for t in self.status.world_target: | ||
778 | pkgs_to_build.append(t) | ||
779 | |||
780 | return pkgs_to_build | ||
781 | |||
782 | def get_bbfiles( self, path = os.getcwd() ): | ||
783 | """Get list of default .bb files by reading out the current directory""" | ||
784 | contents = os.listdir(path) | ||
785 | bbfiles = [] | ||
786 | for f in contents: | ||
787 | (root, ext) = os.path.splitext(f) | ||
788 | if ext == ".bb": | ||
789 | bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f))) | ||
790 | return bbfiles | ||
791 | |||
792 | def find_bbfiles( self, path ): | ||
793 | """Find all the .bb files in a directory""" | ||
794 | from os.path import join | ||
795 | |||
796 | found = [] | ||
797 | for dir, dirs, files in os.walk(path): | ||
798 | for ignored in ('SCCS', 'CVS', '.svn'): | ||
799 | if ignored in dirs: | ||
800 | dirs.remove(ignored) | ||
801 | found += [join(dir,f) for f in files if f.endswith('.bb')] | ||
802 | |||
803 | return found | ||
804 | |||
805 | def collect_bbfiles( self ): | ||
806 | """Collect all available .bb build files""" | ||
807 | parsed, cached, skipped, masked = 0, 0, 0, 0 | ||
808 | self.bb_cache = bb.cache.init(self) | ||
809 | |||
810 | files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split() | ||
811 | data.setVar("BBFILES", " ".join(files), self.configuration.data) | ||
812 | |||
813 | if not len(files): | ||
814 | files = self.get_bbfiles() | ||
815 | |||
816 | if not len(files): | ||
817 | bb.msg.error(bb.msg.domain.Collection, "no files to build.") | ||
818 | |||
819 | newfiles = [] | ||
820 | for f in files: | ||
821 | if os.path.isdir(f): | ||
822 | dirfiles = self.find_bbfiles(f) | ||
823 | if dirfiles: | ||
824 | newfiles += dirfiles | ||
825 | continue | ||
826 | newfiles += glob.glob(f) or [ f ] | ||
827 | |||
828 | bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1) | ||
829 | |||
830 | if not bbmask: | ||
831 | return (newfiles, 0) | ||
832 | |||
833 | try: | ||
834 | bbmask_compiled = re.compile(bbmask) | ||
835 | except sre_constants.error: | ||
836 | bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.") | ||
837 | |||
838 | finalfiles = [] | ||
839 | for i in xrange( len( newfiles ) ): | ||
840 | f = newfiles[i] | ||
841 | if bbmask and bbmask_compiled.search(f): | ||
842 | bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f) | ||
843 | masked += 1 | ||
844 | continue | ||
845 | finalfiles.append(f) | ||
846 | |||
847 | return (finalfiles, masked) | ||
848 | |||
849 | def parse_bbfiles(self, filelist, masked): | ||
850 | parsed, cached, skipped, error, total = 0, 0, 0, 0, len(filelist) | ||
851 | for i in xrange(total): | ||
852 | f = filelist[i] | ||
853 | |||
854 | #bb.msg.debug(1, bb.msg.domain.Collection, "parsing %s" % f) | ||
855 | |||
856 | # read a file's metadata | ||
857 | try: | ||
858 | fromCache, skip = self.bb_cache.loadData(f, self.configuration.data) | ||
859 | if skip: | ||
860 | skipped += 1 | ||
861 | bb.msg.debug(2, bb.msg.domain.Collection, "skipping %s" % f) | ||
862 | self.bb_cache.skip(f) | ||
863 | continue | ||
864 | elif fromCache: cached += 1 | ||
865 | else: parsed += 1 | ||
866 | deps = None | ||
867 | |||
868 | # Disabled by RP as was no longer functional | ||
869 | # allow metadata files to add items to BBFILES | ||
870 | #data.update_data(self.pkgdata[f]) | ||
871 | #addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None | ||
872 | #if addbbfiles: | ||
873 | # for aof in addbbfiles.split(): | ||
874 | # if not files.count(aof): | ||
875 | # if not os.path.isabs(aof): | ||
876 | # aof = os.path.join(os.path.dirname(f),aof) | ||
877 | # files.append(aof) | ||
878 | |||
879 | self.bb_cache.handle_data(f, self.status) | ||
880 | |||
881 | except IOError, e: | ||
882 | error += 1 | ||
883 | self.bb_cache.remove(f) | ||
884 | bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e)) | ||
885 | pass | ||
886 | except KeyboardInterrupt: | ||
887 | self.bb_cache.sync() | ||
888 | raise | ||
889 | except Exception, e: | ||
890 | error += 1 | ||
891 | self.bb_cache.remove(f) | ||
892 | bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f)) | ||
893 | except: | ||
894 | self.bb_cache.remove(f) | ||
895 | raise | ||
896 | finally: | ||
897 | bb.event.fire(bb.event.ParseProgress(self.configuration.event_data, cached, parsed, skipped, masked, error, total)) | ||
898 | |||
899 | self.bb_cache.sync() | ||
900 | if error > 0: | ||
901 | raise ParsingErrorsFound | ||
902 | |||
903 | def serve(self): | ||
904 | |||
905 | if self.configuration.profile: | ||
906 | try: | ||
907 | import cProfile as profile | ||
908 | except: | ||
909 | import profile | ||
910 | |||
911 | profile.runctx("self.server.serve_forever()", globals(), locals(), "profile.log") | ||
912 | |||
913 | # Redirect stdout to capture profile information | ||
914 | pout = open('profile.log.processed', 'w') | ||
915 | so = sys.stdout.fileno() | ||
916 | os.dup2(pout.fileno(), so) | ||
917 | |||
918 | import pstats | ||
919 | p = pstats.Stats('profile.log') | ||
920 | p.sort_stats('time') | ||
921 | p.print_stats() | ||
922 | p.print_callers() | ||
923 | p.sort_stats('cumulative') | ||
924 | p.print_stats() | ||
925 | |||
926 | os.dup2(so, pout.fileno()) | ||
927 | pout.flush() | ||
928 | pout.close() | ||
929 | else: | ||
930 | self.server.serve_forever() | ||
931 | |||
932 | bb.event.fire(CookerExit(self.configuration.event_data)) | ||
933 | |||
934 | class CookerExit(bb.event.Event): | ||
935 | """ | ||
936 | Notify clients of the Cooker shutdown | ||
937 | """ | ||
938 | |||
939 | def __init__(self, d): | ||
940 | bb.event.Event.__init__(self, d) | ||
941 | |||