author:    Tudor Florea <tudor.florea@enea.com>  2015-10-09 20:59:03 (GMT)
committer: Tudor Florea <tudor.florea@enea.com>  2015-10-09 20:59:03 (GMT)
commit:    972dcfcdbfe75dcfeb777150c136576cf1a71e99 (patch)
tree:      97a61cd7e293d7ae9d56ef7ed0f81253365bb026 /bitbake/lib
download:  poky-972dcfcdbfe75dcfeb777150c136576cf1a71e99.tar.gz

initial commit for Enea Linux 5.0 arm

Signed-off-by: Tudor Florea <tudor.florea@enea.com>

Diffstat (limited to 'bitbake/lib'): 299 files changed, 229025 insertions, 0 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000..6917ec3
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,323 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
#
# Copyright (C) 2006 Tim Amsell
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please note:
# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
# Assign a file to __warn__ to get warnings about slow operations.
#

from __future__ import print_function
import copy
import types
ImmutableTypes = (
    types.NoneType,
    bool,
    complex,
    float,
    int,
    long,
    tuple,
    frozenset,
    basestring
)

MUTABLE = "__mutable__"

class COWMeta(type):
    pass

class COWDictMeta(COWMeta):
    __warn__ = False
    __hasmutable__ = False
    __marker__ = tuple()

    def __str__(cls):
        # FIXME: I have magic numbers!
        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
    __repr__ = __str__

    def cow(cls):
        class C(cls):
            __count__ = cls.__count__ + 1
        return C
    copy = cow
    __call__ = cow

    def __setitem__(cls, key, value):
        if not isinstance(value, ImmutableTypes):
            if not isinstance(value, COWMeta):
                cls.__hasmutable__ = True
            key += MUTABLE
        setattr(cls, key, value)

    def __getmutable__(cls, key, readonly=False):
        nkey = key + MUTABLE
        try:
            return cls.__dict__[nkey]
        except KeyError:
            pass

        value = getattr(cls, nkey)
        if readonly:
            return value

        if not cls.__warn__ is False and not isinstance(value, COWMeta):
            print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
        try:
            value = value.copy()
        except AttributeError as e:
            value = copy.copy(value)
        setattr(cls, nkey, value)
        return value

    __getmarker__ = []
    def __getreadonly__(cls, key, default=__getmarker__):
        """\
        Get a value (even if mutable) which you promise not to change.
        """
        return cls.__getitem__(key, default, True)

    def __getitem__(cls, key, default=__getmarker__, readonly=False):
        try:
            try:
                value = getattr(cls, key)
            except AttributeError:
                value = cls.__getmutable__(key, readonly)

            # This is for values which have been deleted
            if value is cls.__marker__:
                raise AttributeError("key %s does not exist." % key)

            return value
        except AttributeError as e:
            if not default is cls.__getmarker__:
                return default

            raise KeyError(str(e))

    def __delitem__(cls, key):
        cls.__setitem__(key, cls.__marker__)

    def __revertitem__(cls, key):
        if not cls.__dict__.has_key(key):
            key += MUTABLE
        delattr(cls, key)

    def __contains__(cls, key):
        return cls.has_key(key)

    def has_key(cls, key):
        value = cls.__getreadonly__(key, cls.__marker__)
        if value is cls.__marker__:
            return False
        return True

    def iter(cls, type, readonly=False):
        for key in dir(cls):
            if key.startswith("__"):
                continue

            if key.endswith(MUTABLE):
                key = key[:-len(MUTABLE)]

            if type == "keys":
                yield key

            try:
                if readonly:
                    value = cls.__getreadonly__(key)
                else:
                    value = cls[key]
            except KeyError:
                continue

            if type == "values":
                yield value
            if type == "items":
                yield (key, value)
        raise StopIteration()

    def iterkeys(cls):
        return cls.iter("keys")
    def itervalues(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print("Warning: If you aren't going to change any of the values, call with readonly=True.", file=cls.__warn__)
        return cls.iter("values", readonly)
    def iteritems(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print("Warning: If you aren't going to change any of the values, call with readonly=True.", file=cls.__warn__)
        return cls.iter("items", readonly)

class COWSetMeta(COWDictMeta):
    def __str__(cls):
        # FIXME: I have magic numbers!
        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
    __repr__ = __str__

    def cow(cls):
        class C(cls):
            __count__ = cls.__count__ + 1
        return C

    def add(cls, value):
        COWDictMeta.__setitem__(cls, repr(hash(value)), value)

    def remove(cls, value):
        COWDictMeta.__delitem__(cls, repr(hash(value)))

    def __in__(cls, value):
        return COWDictMeta.has_key(cls, repr(hash(value)))

    def iterkeys(cls):
        raise TypeError("sets don't have keys")

    def iteritems(cls):
        raise TypeError("sets don't have 'items'")

# These are the actual classes you use!
class COWDictBase(object):
    __metaclass__ = COWDictMeta
    __count__ = 0

class COWSetBase(object):
    __metaclass__ = COWSetMeta
    __count__ = 0

if __name__ == "__main__":
    import sys
    COWDictBase.__warn__ = sys.stderr
    a = COWDictBase()
    print("a", a)

    a['a'] = 'a'
    a['b'] = 'b'
    a['dict'] = {}

    b = a.copy()
    print("b", b)
    b['c'] = 'b'

    print()

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems():
        print(x)
    print()

    b['dict']['a'] = 'b'
    b['a'] = 'c'

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems():
        print(x)
    print()

    try:
        b['dict2']
    except KeyError as e:
        print("Okay!")

    a['set'] = COWSetBase()
    a['set'].add("o1")
    a['set'].add("o1")
    a['set'].add("o2")

    print("a", a)
    for x in a['set'].itervalues():
        print(x)
    print("--")
    print("b", b)
    for x in b['set'].itervalues():
        print(x)
    print()

    b['set'].add('o3')

    print("a", a)
    for x in a['set'].itervalues():
        print(x)
    print("--")
    print("b", b)
    for x in b['set'].itervalues():
        print(x)
    print()

    a['set2'] = set()
    a['set2'].add("o1")
    a['set2'].add("o1")
    a['set2'].add("o2")

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    del b['b']
    try:
        print(b['b'])
    except KeyError:
        print("Yay! deleted key raises error")

    if b.has_key('b'):
        print("Boo!")
    else:
        print("Yay - has_key with delete works!")

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    b.__revertitem__('b')

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    b.__revertitem__('dict')
    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()
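
The copy-on-write mechanism above can be hard to follow from the long demo alone. As a minimal sketch (assuming a Python 2 interpreter with bitbake/lib on sys.path; the variable names are illustrative only): each copy creates a new subclass, so unchanged keys are shared with the parent through ordinary class-attribute lookup, and a write merely shadows the key on the child.

    from bb.COW import COWDictBase

    base = COWDictBase()           # calling the class returns a fresh subclass, not an instance
    base['greeting'] = 'hello'     # immutable value stored as a class attribute on 'base'

    child = base.copy()            # a subclass of 'base'; nothing is copied yet
    print(child['greeting'])       # 'hello' - resolved via attribute lookup on the parent

    child['greeting'] = 'goodbye'  # shadows the key on 'child' only
    print(base['greeting'])        # still 'hello' - the parent is untouched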
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
new file mode 100644
index 0000000..4d69552
--- /dev/null
+++ b/bitbake/lib/bb/__init__.py
@@ -0,0 +1,142 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Build System Python Library
#
# Copyright (C) 2003 Holger Schurig
# Copyright (C) 2003, 2004 Chris Larson
#
# Based on Gentoo's portage.py.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.24.0"

import sys
if sys.version_info < (2, 7, 3):
    raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")


class BBHandledException(Exception):
    """
    The big dilemma for generic bitbake code is what information to give the user
    when an exception occurs. Any exception inheriting this base exception class
    has already provided information to the user via some 'fired' message type such as
    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
    encounters an exception derived from this class, no backtrace or other information
    will be given to the user; it's assumed the earlier event provided the relevant information.
    """
    pass

import os
import logging


class NullHandler(logging.Handler):
    def emit(self, record):
        pass

Logger = logging.getLoggerClass()
class BBLogger(Logger):
    def __init__(self, name):
        if name.split(".")[0] == "BitBake":
            self.debug = self.bbdebug
        Logger.__init__(self, name)

    def bbdebug(self, level, msg, *args, **kwargs):
        return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)

    def plain(self, msg, *args, **kwargs):
        return self.log(logging.INFO + 1, msg, *args, **kwargs)

    def verbose(self, msg, *args, **kwargs):
        return self.log(logging.INFO - 1, msg, *args, **kwargs)

logging.raiseExceptions = False
logging.setLoggerClass(BBLogger)

logger = logging.getLogger("BitBake")
logger.addHandler(NullHandler())
logger.setLevel(logging.DEBUG - 2)

# This has to be imported after the setLoggerClass, as the import of bb.msg
# can result in construction of the various loggers.
import bb.msg

from bb import fetch2 as fetch
sys.modules['bb.fetch'] = sys.modules['bb.fetch2']

# Messaging convenience functions
def plain(*args):
    logger.plain(''.join(args))

def debug(lvl, *args):
    if isinstance(lvl, basestring):
        logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
        args = (lvl,) + args
        lvl = 1
    logger.debug(lvl, ''.join(args))

def note(*args):
    logger.info(''.join(args))

def warn(*args):
    logger.warn(''.join(args))

def error(*args):
    logger.error(''.join(args))

def fatal(*args):
    logger.critical(''.join(args))
    raise BBHandledException()

def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used."""
    import warnings

    if advice:
        advice = ": %s" % advice
    if name is None:
        name = func.__name__

    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function %s%s." % (name, advice),
                      category=DeprecationWarning,
                      stacklevel=2)
        return func(*args, **kwargs)
    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc

# For compatibility
def deprecate_import(current, modulename, fromlist, renames = None):
    """Import objects from one module into another, wrapping them with a DeprecationWarning"""
    import sys

    module = __import__(modulename, fromlist = fromlist)
    for position, objname in enumerate(fromlist):
        obj = getattr(module, objname)
        newobj = deprecated(obj, "{0}.{1}".format(current, objname),
                            "Please use {0}.{1} instead".format(modulename, objname))
        if renames:
            newname = renames[position]
        else:
            newname = objname

        setattr(sys.modules[current], newname, newobj)
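
For illustration, here is a small usage sketch for the deprecated()/deprecate_import() helpers defined above (old_helper and the legacy name are hypothetical, not part of BitBake; bb.utils.explode_deps is the real function referenced elsewhere in this commit):

    import bb

    def old_helper(x):
        return x * 2

    # Wrapping a function makes every call emit a DeprecationWarning.
    old_helper = bb.deprecated(old_helper, advice="use a newer helper instead")

    # Re-export bb.utils.explode_deps into the current module under a
    # hypothetical legacy name, wrapped so that callers are warned to migrate.
    bb.deprecate_import(__name__, "bb.utils", ["explode_deps"],
                        renames=["legacy_explode_deps"])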
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
new file mode 100644
index 0000000..65cc851
--- /dev/null
+++ b/bitbake/lib/bb/build.py
@@ -0,0 +1,711 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake 'Build' implementation
#
# Core code for function execution and task handling in the
# BitBake build tools.
#
# Copyright (C) 2003, 2004 Chris Larson
#
# Based on Gentoo's portage.py.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
import logging
import shlex
import glob
import time
import bb
import bb.msg
import bb.process
from contextlib import nested
from bb import event, utils

bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

# When we execute a Python function, we'd like certain things
# in all namespaces, hence we add them to __builtins__.
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
__builtins__['os'] = os

class FuncFailed(Exception):
    def __init__(self, name = None, logfile = None):
        self.logfile = logfile
        self.name = name
        if name:
            self.msg = 'Function failed: %s' % name
        else:
            self.msg = "Function failed"

    def __str__(self):
        if self.logfile and os.path.exists(self.logfile):
            msg = ("%s (log file is located at %s)" %
                   (self.msg, self.logfile))
        else:
            msg = self.msg
        return msg

class TaskBase(event.Event):
    """Base class for task events"""

    def __init__(self, t, logfile, d):
        self._task = t
        self._package = d.getVar("PF", True)
        self.taskfile = d.getVar("FILE", True)
        self.taskname = self._task
        self.logfile = logfile
        self.time = time.time()
        event.Event.__init__(self)
        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())

    def getTask(self):
        return self._task

    def setTask(self, task):
        self._task = task

    def getDisplayName(self):
        return bb.event.getName(self)[4:]

    task = property(getTask, setTask, None, "task property")

class TaskStarted(TaskBase):
    """Task execution started"""
    def __init__(self, t, logfile, taskflags, d):
        super(TaskStarted, self).__init__(t, logfile, d)
        self.taskflags = taskflags

class TaskSucceeded(TaskBase):
    """Task execution completed"""

class TaskFailed(TaskBase):
    """Task execution failed"""

    def __init__(self, task, logfile, metadata, errprinted = False):
        self.errprinted = errprinted
        super(TaskFailed, self).__init__(task, logfile, metadata)

class TaskFailedSilent(TaskBase):
    """Task execution failed (silently)"""
    def getDisplayName(self):
        # Don't need to tell the user it was silent
        return "Failed"

class TaskInvalid(TaskBase):

    def __init__(self, task, metadata):
        super(TaskInvalid, self).__init__(task, None, metadata)
        self._message = "No such task '%s'" % task


class LogTee(object):
    def __init__(self, logger, outfile):
        self.outfile = outfile
        self.logger = logger
        self.name = self.outfile.name

    def write(self, string):
        self.logger.plain(string)
        self.outfile.write(string)

    def __enter__(self):
        self.outfile.__enter__()
        return self

    def __exit__(self, *excinfo):
        self.outfile.__exit__(*excinfo)

    def __repr__(self):
        return '<LogTee {0}>'.format(self.name)
    def flush(self):
        self.outfile.flush()

def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""

    body = d.getVar(func)
    if not body:
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = d.getVarFlags(func)
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in d.expand(cleandirs).split():
            bb.utils.remove(cdir, True)
            bb.utils.mkdirhier(cdir)

    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = d.expand(dirs).split()

    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = d.getVar('B', True)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

    lockflag = flags.get('lockfiles')
    if lockflag:
        lockfiles = [f for f in d.expand(lockflag).split()]
    else:
        lockfiles = None

    tempdir = d.getVar('T', True)

    # The 'or func' fallback allows items to be executed outside of the
    # normal task set, such as buildhistory
    task = d.getVar('BB_RUNTASK', True) or func
    if task == func:
        taskfunc = task
    else:
        taskfunc = "%s.%s" % (task, func)

    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))

    # Set up the courtesy link to the runfn; this is done for tasks only.
    # We create the link just before the run script is written: if it were
    # created afterwards and writing the run script failed, the resulting
    # exception would prevent the link from ever being created.
    if task == func:
        runlink = os.path.join(tempdir, 'run.{0}'.format(task))
        if runlink:
            bb.utils.remove(runlink)

            try:
                os.symlink(runfn, runlink)
            except OSError:
                pass

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)

_functionfmt = """
def {function}(d):
{body}

{function}(d)
"""
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'"""

    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)

    if cwd:
        try:
            olddir = os.getcwd()
        except OSError:
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
    except:
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass

def shell_trap_code():
    return '''#!/bin/sh\n
# Emit a useful diagnostic if something fails:
bb_exit_handler() {
    ret=$?
    case $ret in
    0)  ;;
    *)  case $BASH_VERSION in
        "") echo "WARNING: exit code $ret from a shell command.";;
        *)  echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
  \"$BASH_COMMAND\"";;
        esac
        exit $ret
    esac
}
trap 'bb_exit_handler' 0
set -e
'''

def exec_func_shell(func, d, runfile, cwd=None):
    """Execute a shell function from the metadata

    Note on directory behavior.  The 'dirs' varflag should contain a list
    of the directories you need created prior to execution.  The last
    item in the list is where we will chdir/cd to.
    """

    # Don't let the emitted shell script override PWD
    d.delVarFlag('PWD', 'export')

    with open(runfile, 'w') as script:
        script.write(shell_trap_code())

        bb.data.emit_func(func, script, d)

        if bb.msg.loggerVerboseLogs:
            script.write("set -x\n")
        if cwd:
            script.write("cd '%s'\n" % cwd)
        script.write("%s\n" % func)
        script.write('''
# cleanup
ret=$?
trap '' 0
exit $ret
''')

    os.chmod(runfile, 0775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]

    if bb.msg.loggerDefaultVerbose:
        logfile = LogTee(logger, sys.stdout)
    else:
        logfile = sys.stdout

    bb.debug(2, "Executing shell function %s" % func)

    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)

def _task_data(fn, task, d):
    localdata = bb.data.createCopy(d)
    localdata.setVar('BB_FILENAME', fn)
    localdata.setVar('BB_CURRENTTASK', task[3:])
    localdata.setVar('OVERRIDES', 'task-%s:%s' %
                     (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
    localdata.finalize()
    bb.data.expandKeys(localdata)
    return localdata

def _exec_task(fn, task, d, quieterr):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not d.getVarFlag(task, 'task'):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar('T', True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    # Change nice level if we're asked to
    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
    if nice:
        curnice = os.nice(0)
        nice = int(nice) - curnice
        newnice = os.nice(nice)
        logger.debug(1, "Renice to %s " % newnice)

    bb.utils.mkdirhier(tempdir)

    # Determine the logfile to generate
    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
    logbase = logfmt.format(task=task, pid=os.getpid())

    # Document the order of the tasks...
    logorder = os.path.join(tempdir, 'log.task_order')
    try:
        with open(logorder, 'a') as logorderfile:
            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
    except OSError:
        logger.exception("Opening log file '%s'", logorder)
        pass

    # Setup the courtesy link to the logfn
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logfn = os.path.join(tempdir, logbase)
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logbase, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

    class ErrorCheckHandler(logging.Handler):
        def __init__(self):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            self.triggered = True

    # Handle logfiles
    si = open('/dev/null', 'r')
    try:
        bb.utils.mkdirhier(os.path.dirname(logfn))
        logfile = open(logfn, 'w')
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
        pass

    # Dup the existing fds so we don't lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure Python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
    handler.setLevel(logging.DEBUG - 2)
    bblogger.addHandler(handler)

    errchk = ErrorCheckHandler()
    bblogger.addHandler(errchk)

    localdata.setVar('BB_LOGFILE', logfn)
    localdata.setVar('BB_RUNTASK', task)

    flags = localdata.getVarFlags(task)

    event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
    try:
        for func in (prefuncs or '').split():
            exec_func(func, localdata)
        exec_func(task, localdata)
        for func in (postfuncs or '').split():
            exec_func(func, localdata)
    except FuncFailed as exc:
        if quieterr:
            event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
        else:
            errprinted = errchk.triggered
            logger.error(str(exc))
            event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()

        bblogger.removeHandler(handler)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
        si.close()

        logfile.close()
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)
    event.fire(TaskSucceeded(task, logfn, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0

def exec_task(fn, task, d, profile = False):
    try:
        quieterr = False
        if d.getVarFlag(task, "quieterrors") is not None:
            quieterr = True

        if profile:
            profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
            try:
                import cProfile as profile
            except:
                import profile
            prof = profile.Profile()
            ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)

            return ret
        else:
            return _exec_task(fn, task, d, quieterr)

    except Exception:
        from traceback import format_exc
        if not quieterr:
            logger.error("Build of %s failed" % (task))
            logger.error(format_exc())
            failedevent = TaskFailed(task, None, d, True)
            event.fire(failedevent, d)
        return 1

def stamp_internal(taskname, d, file_name, baseonly=False):
    """
    Internal stamp helper function
    Makes sure the stamp directory exists
    Returns the stamp path+filename

    In the bitbake core, d can be a CacheData and file_name will be set.
    When called in task context, d will be a data store, file_name will not be set
    """
    taskflagname = taskname
    if taskname.endswith("_setscene") and taskname != "do_setscene":
        taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if baseonly:
        return stamp

    if not stamp:
        return

    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    stampdir = os.path.dirname(stamp)
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)

    return stamp

def stamp_cleanmask_internal(taskname, d, file_name):
    """
    Internal stamp helper function to generate stamp cleaning mask
    Returns the stamp path+filename

    In the bitbake core, d can be a CacheData and file_name will be set.
    When called in task context, d will be a data store, file_name will not be set
    """
    taskflagname = taskname
    if taskname.endswith("_setscene") and taskname != "do_setscene":
        taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if not stamp:
        return []

    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)

    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]

def make_stamp(task, d, file_name = None):
    """
    Creates/updates a stamp for a given task
    (d can be a data dict or dataCache)
    """
    cleanmask = stamp_cleanmask_internal(task, d, file_name)
    for mask in cleanmask:
        for name in glob.glob(mask):
            # Preserve sigdata files in the stamps directory
            if "sigdata" in name:
                continue
            # Preserve taint files in the stamps directory
            if name.endswith('.taint'):
                continue
            os.unlink(name)

    stamp = stamp_internal(task, d, file_name)
    # Remove the file and recreate to force timestamp
    # change on broken NFS filesystems
    if stamp:
        bb.utils.remove(stamp)
        open(stamp, "w").close()

    # If we're in task context, write out a signature file for each task
    # as it completes
    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
        stampbase = stamp_internal(task, d, None, True)
        file_name = d.getVar('BB_FILENAME', True)
        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)

def del_stamp(task, d, file_name = None):
    """
    Removes a stamp for a given task
    (d can be a data dict or dataCache)
    """
    stamp = stamp_internal(task, d, file_name)
    bb.utils.remove(stamp)

def write_taint(task, d, file_name = None):
    """
    Creates a "taint" file which will force the specified task and its
    dependents to be re-run the next time by influencing the value of its
    taskhash.
    (d can be a data dict or dataCache)
    """
    import uuid
    if file_name:
        taintfn = d.stamp[file_name] + '.' + task + '.taint'
    else:
        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
    bb.utils.mkdirhier(os.path.dirname(taintfn))
    # The specific content of the taint file is not really important,
    # we just need it to be random, so a random UUID is used
    with open(taintfn, 'w') as taintf:
        taintf.write(str(uuid.uuid4()))

def stampfile(taskname, d, file_name = None):
    """
    Return the stamp for a given task
    (d can be a data dict or dataCache)
    """
    return stamp_internal(taskname, d, file_name)

def add_tasks(tasklist, deltasklist, d):
    task_deps = d.getVar('_task_deps')
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
        task_deps['tasks'] = []
    if not 'parents' in task_deps:
        task_deps['parents'] = {}

    for task in tasklist:
        task = d.expand(task)

        if task in deltasklist:
            continue

        d.setVarFlag(task, 'task', 1)

        if not task in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = d.getVarFlags(task)
        def getTask(name):
            if not name in task_deps:
                task_deps[name] = {}
            if name in flags:
                deptask = d.expand(flags[name])
                task_deps[name][task] = deptask
        getTask('depends')
        getTask('rdepends')
        getTask('deptask')
        getTask('rdeptask')
        getTask('recrdeptask')
        getTask('recideptask')
        getTask('nostamp')
        getTask('fakeroot')
        getTask('noexec')
        getTask('umask')
        task_deps['parents'][task] = []
        if 'deps' in flags:
            for dep in flags['deps']:
                dep = d.expand(dep)
                task_deps['parents'][task].append(dep)

    # don't assume holding a reference
    d.setVar('_task_deps', task_deps)

def addtask(task, before, after, d):
    if task[:3] != "do_":
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBTASKS', bbtasks)

    existing = d.getVarFlag(task, "deps") or []
    if after is not None:
        # set up deps for function
        for entry in after.split():
            if entry not in existing:
                existing.append(entry)
        d.setVarFlag(task, "deps", existing)
    if before is not None:
        # set up things that depend on this func
        for entry in before.split():
            existing = d.getVarFlag(entry, "deps") or []
            if task not in existing:
                d.setVarFlag(entry, "deps", [task] + existing)

def deltask(task, d):
    if task[:3] != "do_":
        task = "do_" + task

    bbtasks = d.getVar('__BBDELTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBDELTASKS', bbtasks)

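To make the task-wiring logic above concrete, here is a hedged sketch of addtask() in action against a minimal stand-in for the real datastore. FakeData is hypothetical and implements only the methods addtask() touches (the real object is BitBake's DataSmart store), and it assumes bb.build is importable from a checkout on sys.path:

    from bb.build import addtask  # assumes bitbake/lib is on sys.path

    class FakeData(object):
        """Hypothetical minimal datastore: just enough for addtask()."""
        def __init__(self):
            self.vars = {}
            self.flags = {}
        def getVar(self, name):
            return self.vars.get(name)
        def setVar(self, name, value):
            self.vars[name] = value
        def getVarFlag(self, name, flag):
            return self.flags.get((name, flag))
        def setVarFlag(self, name, flag, value):
            self.flags[(name, flag)] = value

    d = FakeData()
    addtask('compile', None, 'do_configure', d)      # do_compile runs after do_configure
    addtask('install', 'do_build', 'do_compile', d)  # do_install: after do_compile, before do_build

    print(d.getVarFlag('do_install', 'deps'))  # ['do_compile']
    print(d.getVarFlag('do_build', 'deps'))    # ['do_install']
    print(d.getVar('__BBTASKS'))               # ['do_compile', 'do_install']
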
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000..a1dde96
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,837 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import logging
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "148"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)

# RecipeInfoCommon defines common data-retrieval methods over the
# metadata for the caches. CoreRecipeInfo, as well as any other extra
# RecipeInfo class, needs to inherit from this class.
class RecipeInfoCommon(object):

    @classmethod
    def listvar(cls, var, metadata):
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                        for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


class CoreRecipeInfo(RecipeInfoCommon):
    __slots__ = ()

    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename, metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        if not self.pn in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs



class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # The caches_array information passed into the constructor is
        # used later to decide whether we need extra cache file
        # dump/load support.
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # First, use the core cache file information to check validity
303 | with open(self.cachefile, "rb") as cachefile: | ||
304 | pickled = pickle.Unpickler(cachefile) | ||
305 | try: | ||
306 | cache_ver = pickled.load() | ||
307 | bitbake_ver = pickled.load() | ||
308 | except Exception: | ||
309 | logger.info('Invalid cache, rebuilding...') | ||
310 | return | ||
311 | |||
312 | if cache_ver != __cache_version__: | ||
313 | logger.info('Cache version mismatch, rebuilding...') | ||
314 | return | ||
315 | elif bitbake_ver != bb.__version__: | ||
316 | logger.info('Bitbake version mismatch, rebuilding...') | ||
317 | return | ||
318 | |||
319 | |||
320 | cachesize = 0 | ||
321 | previous_progress = 0 | ||
322 | previous_percent = 0 | ||
323 | |||
324 | # Calculate the correct cachesize of all those cache files | ||
325 | for cache_class in self.caches_array: | ||
326 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
327 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | ||
328 | with open(cachefile, "rb") as cachefile: | ||
329 | cachesize += os.fstat(cachefile.fileno()).st_size | ||
330 | |||
331 | bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data) | ||
332 | |||
333 | for cache_class in self.caches_array: | ||
334 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
335 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | ||
336 | with open(cachefile, "rb") as cachefile: | ||
337 | pickled = pickle.Unpickler(cachefile) | ||
338 | while True: | ||
339 | try: | ||
340 | key = pickled.load() | ||
341 | value = pickled.load() | ||
342 | except Exception: | ||
343 | break | ||
344 | if key in self.depends_cache: | ||
345 | self.depends_cache[key].append(value) | ||
346 | else: | ||
347 | self.depends_cache[key] = [value] | ||
348 | # only fire events on whole percentage boundaries | ||
349 | current_progress = cachefile.tell() + previous_progress | ||
350 | current_percent = 100 * current_progress / cachesize | ||
351 | if current_percent > previous_percent: | ||
352 | previous_percent = current_percent | ||
353 | bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize), | ||
354 | self.data) | ||
355 | |||
356 | previous_progress = current_progress | ||
357 | |||
358 | # Note: the depends cache count corresponds to the number of files parsed; | ||
359 | # a file may have several caches but is still regarded as one item in the cache | ||
360 | bb.event.fire(bb.event.CacheLoadCompleted(cachesize, | ||
361 | len(self.depends_cache)), | ||
362 | self.data) | ||
363 | |||
364 | |||
365 | @staticmethod | ||
366 | def virtualfn2realfn(virtualfn): | ||
367 | """ | ||
368 | Convert a virtual file name to a real one + the associated subclass keyword | ||
369 | """ | ||
370 | |||
371 | fn = virtualfn | ||
372 | cls = "" | ||
373 | if virtualfn.startswith('virtual:'): | ||
374 | elems = virtualfn.split(':') | ||
375 | cls = ":".join(elems[1:-1]) | ||
376 | fn = elems[-1] | ||
377 | return (fn, cls) | ||
378 | |||
379 | @staticmethod | ||
380 | def realfn2virtual(realfn, cls): | ||
381 | """ | ||
382 | Convert a real filename + the associated subclass keyword to a virtual filename | ||
383 | """ | ||
384 | if cls == "": | ||
385 | return realfn | ||
386 | return "virtual:" + cls + ":" + realfn | ||
387 | |||
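# Example (hypothetical paths): the two helpers above are exact inverses:
#
#   Cache.virtualfn2realfn("virtual:native:/p/foo.bb") -> ("/p/foo.bb", "native")
#   Cache.realfn2virtual("/p/foo.bb", "native") -> "virtual:native:/p/foo.bb"
#   Cache.realfn2virtual("/p/foo.bb", "") -> "/p/foo.bb"
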
388 | @classmethod | ||
389 | def loadDataFull(cls, virtualfn, appends, cfgData): | ||
390 | """ | ||
391 | Return a complete set of data for fn. | ||
392 | To do this, we need to parse the file. | ||
393 | """ | ||
394 | |||
395 | (fn, virtual) = cls.virtualfn2realfn(virtualfn) | ||
396 | |||
397 | logger.debug(1, "Parsing %s (full)", fn) | ||
398 | |||
399 | cfgData.setVar("__ONLYFINALISE", virtual or "default") | ||
400 | bb_data = cls.load_bbfile(fn, appends, cfgData) | ||
401 | return bb_data[virtual] | ||
402 | |||
403 | @classmethod | ||
404 | def parse(cls, filename, appends, configdata, caches_array): | ||
405 | """Parse the specified filename, returning the recipe information""" | ||
406 | infos = [] | ||
407 | datastores = cls.load_bbfile(filename, appends, configdata) | ||
408 | depends = [] | ||
409 | for variant, data in sorted(datastores.iteritems(), | ||
410 | key=lambda i: i[0], | ||
411 | reverse=True): | ||
412 | virtualfn = cls.realfn2virtual(filename, variant) | ||
413 | depends = depends + (data.getVar("__depends", False) or []) | ||
414 | if depends and not variant: | ||
415 | data.setVar("__depends", depends) | ||
416 | |||
417 | info_array = [] | ||
418 | for cache_class in caches_array: | ||
419 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
420 | info = cache_class(filename, data) | ||
421 | info_array.append(info) | ||
422 | infos.append((virtualfn, info_array)) | ||
423 | |||
424 | return infos | ||
425 | |||
426 | def load(self, filename, appends, configdata): | ||
427 | """Obtain the recipe information for the specified filename, | ||
428 | using cached values if available, otherwise parsing. | ||
429 | |||
430 | Note that if it does parse to obtain the info, it will not | ||
431 | automatically add the information to the cache or to your | ||
432 | CacheData. Use the add or add_info method to do so after | ||
433 | running this, or use loadData instead.""" | ||
434 | cached = self.cacheValid(filename, appends) | ||
435 | if cached: | ||
436 | infos = [] | ||
437 | # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo] | ||
438 | info_array = self.depends_cache[filename] | ||
439 | for variant in info_array[0].variants: | ||
440 | virtualfn = self.realfn2virtual(filename, variant) | ||
441 | infos.append((virtualfn, self.depends_cache[virtualfn])) | ||
442 | else: | ||
443 | logger.debug(1, "Parsing %s", filename) | ||
444 | return False, self.parse(filename, appends, configdata, self.caches_array) | ||
445 | |||
446 | return cached, infos | ||
447 | |||
448 | def loadData(self, fn, appends, cfgData, cacheData): | ||
449 | """Load the recipe info for the specified filename, | ||
450 | parsing and adding to the cache if necessary, and adding | ||
451 | the recipe information to the supplied CacheData instance.""" | ||
452 | skipped, virtuals = 0, 0 | ||
453 | |||
454 | cached, infos = self.load(fn, appends, cfgData) | ||
455 | for virtualfn, info_array in infos: | ||
456 | if info_array[0].skipped: | ||
457 | logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason) | ||
458 | skipped += 1 | ||
459 | else: | ||
460 | self.add_info(virtualfn, info_array, cacheData, not cached) | ||
461 | virtuals += 1 | ||
462 | |||
463 | return cached, skipped, virtuals | ||
464 | |||
465 | def cacheValid(self, fn, appends): | ||
466 | """ | ||
467 | Is the cache valid for fn? | ||
468 | Fast version, no timestamps checked. | ||
469 | """ | ||
470 | if fn not in self.checked: | ||
471 | self.cacheValidUpdate(fn, appends) | ||
472 | |||
473 | # Is cache enabled? | ||
474 | if not self.has_cache: | ||
475 | return False | ||
476 | if fn in self.clean: | ||
477 | return True | ||
478 | return False | ||
479 | |||
480 | def cacheValidUpdate(self, fn, appends): | ||
481 | """ | ||
482 | Is the cache valid for fn? | ||
483 | Make thorough (slower) checks including timestamps. | ||
484 | """ | ||
485 | # Is cache enabled? | ||
486 | if not self.has_cache: | ||
487 | return False | ||
488 | |||
489 | self.checked.add(fn) | ||
490 | |||
491 | # File isn't in depends_cache | ||
492 | if not fn in self.depends_cache: | ||
493 | logger.debug(2, "Cache: %s is not cached", fn) | ||
494 | return False | ||
495 | |||
496 | mtime = bb.parse.cached_mtime_noerror(fn) | ||
497 | |||
498 | # Check file still exists | ||
499 | if mtime == 0: | ||
500 | logger.debug(2, "Cache: %s no longer exists", fn) | ||
501 | self.remove(fn) | ||
502 | return False | ||
503 | |||
504 | info_array = self.depends_cache[fn] | ||
505 | # Check the file's timestamp | ||
506 | if mtime != info_array[0].timestamp: | ||
507 | logger.debug(2, "Cache: %s changed", fn) | ||
508 | self.remove(fn) | ||
509 | return False | ||
510 | |||
511 | # Check dependencies are still valid | ||
512 | depends = info_array[0].file_depends | ||
513 | if depends: | ||
514 | for f, old_mtime in depends: | ||
515 | fmtime = bb.parse.cached_mtime_noerror(f) | ||
516 | # Check if file still exists | ||
517 | if old_mtime != 0 and fmtime == 0: | ||
518 | logger.debug(2, "Cache: %s's dependency %s was removed", | ||
519 | fn, f) | ||
520 | self.remove(fn) | ||
521 | return False | ||
522 | |||
523 | if (fmtime != old_mtime): | ||
524 | logger.debug(2, "Cache: %s's dependency %s changed", | ||
525 | fn, f) | ||
526 | self.remove(fn) | ||
527 | return False | ||
528 | |||
529 | if hasattr(info_array[0], 'file_checksums'): | ||
530 | for _, fl in info_array[0].file_checksums.items(): | ||
531 | for f in fl.split(): | ||
532 | if "*" in f: | ||
533 | continue | ||
534 | f, exist = f.split(":") | ||
535 | if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): | ||
536 | logger.debug(2, "Cache: %s's file checksum list file %s changed", | ||
537 | fn, f) | ||
538 | self.remove(fn) | ||
539 | return False | ||
540 | |||
541 | if appends != info_array[0].appends: | ||
542 | logger.debug(2, "Cache: appends for %s changed", fn) | ||
543 | logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends))) | ||
544 | self.remove(fn) | ||
545 | return False | ||
546 | |||
547 | invalid = False | ||
548 | for cls in info_array[0].variants: | ||
549 | virtualfn = self.realfn2virtual(fn, cls) | ||
550 | self.clean.add(virtualfn) | ||
551 | if virtualfn not in self.depends_cache: | ||
552 | logger.debug(2, "Cache: %s is not cached", virtualfn) | ||
553 | invalid = True | ||
554 | |||
555 | # If any one of the variants is not present, mark as invalid for all | ||
556 | if invalid: | ||
557 | for cls in info_array[0].variants: | ||
558 | virtualfn = self.realfn2virtual(fn, cls) | ||
559 | if virtualfn in self.clean: | ||
560 | logger.debug(2, "Cache: Removing %s from cache", virtualfn) | ||
561 | self.clean.remove(virtualfn) | ||
562 | if fn in self.clean: | ||
563 | logger.debug(2, "Cache: Marking %s as not clean", fn) | ||
564 | self.clean.remove(fn) | ||
565 | return False | ||
566 | |||
567 | self.clean.add(fn) | ||
568 | return True | ||
569 | |||
570 | def remove(self, fn): | ||
571 | """ | ||
572 | Remove a fn from the cache | ||
573 | Called from the parser in error cases | ||
574 | """ | ||
575 | if fn in self.depends_cache: | ||
576 | logger.debug(1, "Removing %s from cache", fn) | ||
577 | del self.depends_cache[fn] | ||
578 | if fn in self.clean: | ||
579 | logger.debug(1, "Marking %s as unclean", fn) | ||
580 | self.clean.remove(fn) | ||
581 | |||
582 | def sync(self): | ||
583 | """ | ||
584 | Save the cache | ||
585 | Called from the parser when complete (or exiting) | ||
586 | """ | ||
587 | |||
588 | if not self.has_cache: | ||
589 | return | ||
590 | |||
591 | if self.cacheclean: | ||
592 | logger.debug(2, "Cache is clean, not saving.") | ||
593 | return | ||
594 | |||
595 | file_dict = {} | ||
596 | pickler_dict = {} | ||
597 | for cache_class in self.caches_array: | ||
598 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
599 | cache_class_name = cache_class.__name__ | ||
600 | cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash) | ||
601 | file_dict[cache_class_name] = open(cachefile, "wb") | ||
602 | pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL) | ||
603 | |||
604 | pickler_dict['CoreRecipeInfo'].dump(__cache_version__) | ||
605 | pickler_dict['CoreRecipeInfo'].dump(bb.__version__) | ||
606 | |||
607 | try: | ||
608 | for key, info_array in self.depends_cache.iteritems(): | ||
609 | for info in info_array: | ||
610 | if isinstance(info, RecipeInfoCommon): | ||
611 | cache_class_name = info.__class__.__name__ | ||
612 | pickler_dict[cache_class_name].dump(key) | ||
613 | pickler_dict[cache_class_name].dump(info) | ||
614 | finally: | ||
615 | for cache_class in self.caches_array: | ||
616 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
617 | cache_class_name = cache_class.__name__ | ||
618 | file_dict[cache_class_name].close() | ||
619 | |||
620 | del self.depends_cache | ||
621 | |||
622 | @staticmethod | ||
623 | def mtime(cachefile): | ||
624 | return bb.parse.cached_mtime_noerror(cachefile) | ||
625 | |||
626 | def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None): | ||
627 | if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped): | ||
628 | cacheData.add_from_recipeinfo(filename, info_array) | ||
629 | |||
630 | if watcher: | ||
631 | watcher(info_array[0].file_depends) | ||
632 | |||
633 | if not self.has_cache: | ||
634 | return | ||
635 | |||
636 | if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: | ||
637 | if parsed: | ||
638 | self.cacheclean = False | ||
639 | self.depends_cache[filename] = info_array | ||
640 | |||
641 | def add(self, file_name, data, cacheData, parsed=None): | ||
642 | """ | ||
643 | Save data we need into the cache | ||
644 | """ | ||
645 | |||
646 | realfn = self.virtualfn2realfn(file_name)[0] | ||
647 | |||
648 | info_array = [] | ||
649 | for cache_class in self.caches_array: | ||
650 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
651 | info_array.append(cache_class(realfn, data)) | ||
652 | self.add_info(file_name, info_array, cacheData, parsed) | ||
653 | |||
654 | @staticmethod | ||
655 | def load_bbfile(bbfile, appends, config): | ||
656 | """ | ||
657 | Load and parse one .bb build file | ||
658 | Return the parsed data, as one datastore per variant of the file | ||
659 | """ | ||
660 | chdir_back = False | ||
661 | |||
662 | from bb import data, parse | ||
663 | |||
664 | # expand TMPDIR now so that it reflects the current TOPDIR | ||
665 | data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) | ||
666 | bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) | ||
667 | oldpath = os.path.abspath(os.getcwd()) | ||
668 | parse.cached_mtime_noerror(bbfile_loc) | ||
669 | bb_data = data.init_db(config) | ||
670 | # The ConfHandler first checks whether TOPDIR is set and, if | ||
671 | # not, falls back to calling getcwd(). | ||
672 | # Previously, we chdir()ed to bbfile_loc, called the handler | ||
673 | # and finally chdir()ed back, a couple of thousand times. We now | ||
674 | # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet. | ||
675 | if not data.getVar('TOPDIR', bb_data): | ||
676 | chdir_back = True | ||
677 | data.setVar('TOPDIR', bbfile_loc, bb_data) | ||
678 | try: | ||
679 | if appends: | ||
680 | data.setVar('__BBAPPEND', " ".join(appends), bb_data) | ||
681 | bb_data = parse.handle(bbfile, bb_data) | ||
682 | if chdir_back: | ||
683 | os.chdir(oldpath) | ||
684 | return bb_data | ||
685 | except: | ||
686 | if chdir_back: | ||
687 | os.chdir(oldpath) | ||
688 | raise | ||
689 | |||
690 | |||
691 | def init(cooker): | ||
692 | """ | ||
693 | The Objective: Cache the minimum amount of data possible yet get to the | ||
694 | stage of building packages (i.e. tryBuild) without reparsing any .bb files. | ||
695 | |||
696 | To do this, we intercept getVar calls and only cache the variables we see | ||
697 | being accessed. We rely on getVar calls being made for all | ||
698 | variables bitbake might need to use to reach this stage. For each cached | ||
699 | file we need to track: | ||
700 | |||
701 | * Its mtime | ||
702 | * The mtimes of all its dependencies | ||
703 | * Whether it caused a parse.SkipRecipe exception | ||
704 | |||
705 | Files causing parsing errors are evicted from the cache. | ||
706 | |||
707 | """ | ||
708 | return Cache(cooker.configuration.data, cooker.configuration.data_hash, cooker.caches_array) | ||
709 | |||
710 | |||
711 | class CacheData(object): | ||
712 | """ | ||
713 | The data structures we compile from the cached data | ||
714 | """ | ||
715 | |||
716 | def __init__(self, caches_array): | ||
717 | self.caches_array = caches_array | ||
718 | for cache_class in self.caches_array: | ||
719 | if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon): | ||
720 | cache_class.init_cacheData(self) | ||
721 | |||
722 | # Direct cache variables | ||
723 | self.task_queues = {} | ||
724 | self.preferred = {} | ||
725 | self.tasks = {} | ||
726 | # Indirect Cache variables (set elsewhere) | ||
727 | self.ignored_dependencies = [] | ||
728 | self.world_target = set() | ||
729 | self.bbfile_priority = {} | ||
730 | |||
731 | def add_from_recipeinfo(self, fn, info_array): | ||
732 | for info in info_array: | ||
733 | info.add_cacheData(self, fn) | ||
734 | |||
735 | class MultiProcessCache(object): | ||
736 | """ | ||
737 | BitBake multi-process cache implementation | ||
738 | |||
739 | Used by the codeparser & file checksum caches | ||
740 | """ | ||
741 | |||
742 | def __init__(self): | ||
743 | self.cachefile = None | ||
744 | self.cachedata = self.create_cachedata() | ||
745 | self.cachedata_extras = self.create_cachedata() | ||
746 | |||
747 | def init_cache(self, d): | ||
748 | cachedir = (d.getVar("PERSISTENT_DIR", True) or | ||
749 | d.getVar("CACHE", True)) | ||
750 | if cachedir in [None, '']: | ||
751 | return | ||
752 | bb.utils.mkdirhier(cachedir) | ||
753 | self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name) | ||
754 | logger.debug(1, "Using cache in '%s'", self.cachefile) | ||
755 | |||
756 | glf = bb.utils.lockfile(self.cachefile + ".lock") | ||
757 | |||
758 | try: | ||
759 | with open(self.cachefile, "rb") as f: | ||
760 | p = pickle.Unpickler(f) | ||
761 | data, version = p.load() | ||
762 | except: | ||
763 | bb.utils.unlockfile(glf) | ||
764 | return | ||
765 | |||
766 | bb.utils.unlockfile(glf) | ||
767 | |||
768 | if version != self.__class__.CACHE_VERSION: | ||
769 | return | ||
770 | |||
771 | self.cachedata = data | ||
772 | |||
773 | def create_cachedata(self): | ||
774 | data = [{}] | ||
775 | return data | ||
776 | |||
777 | def save_extras(self, d): | ||
778 | if not self.cachefile: | ||
779 | return | ||
780 | |||
781 | glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) | ||
782 | |||
783 | i = os.getpid() | ||
784 | lf = None | ||
785 | while not lf: | ||
786 | lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False) | ||
787 | if not lf or os.path.exists(self.cachefile + "-" + str(i)): | ||
788 | if lf: | ||
789 | bb.utils.unlockfile(lf) | ||
790 | lf = None | ||
791 | i = i + 1 | ||
792 | continue | ||
793 | |||
794 | with open(self.cachefile + "-" + str(i), "wb") as f: | ||
795 | p = pickle.Pickler(f, -1) | ||
796 | p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION]) | ||
797 | |||
798 | bb.utils.unlockfile(lf) | ||
799 | bb.utils.unlockfile(glf) | ||
800 | |||
801 | def merge_data(self, source, dest): | ||
802 | for j in range(len(dest)): | ||
803 | for h in source[j]: | ||
804 | if h not in dest[j]: | ||
805 | dest[j][h] = source[j][h] | ||
806 | |||
807 | def save_merge(self, d): | ||
808 | if not self.cachefile: | ||
809 | return | ||
810 | |||
811 | glf = bb.utils.lockfile(self.cachefile + ".lock") | ||
812 | |||
813 | data = self.cachedata | ||
814 | |||
815 | for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: | ||
816 | f = os.path.join(os.path.dirname(self.cachefile), f) | ||
817 | try: | ||
818 | with open(f, "rb") as fd: | ||
819 | p = pickle.Unpickler(fd) | ||
820 | extradata, version = p.load() | ||
821 | except (IOError, EOFError): | ||
822 | os.unlink(f) | ||
823 | continue | ||
824 | |||
825 | if version != self.__class__.CACHE_VERSION: | ||
826 | os.unlink(f) | ||
827 | continue | ||
828 | |||
829 | self.merge_data(extradata, data) | ||
830 | os.unlink(f) | ||
831 | |||
832 | with open(self.cachefile, "wb") as f: | ||
833 | p = pickle.Pickler(f, -1) | ||
834 | p.dump([data, self.__class__.CACHE_VERSION]) | ||
835 | |||
836 | bb.utils.unlockfile(glf) | ||
837 | |||
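The MultiProcessCache above is built around a split life cycle: worker processes record additions in cachedata_extras and dump them to per-pid side files with save_extras(), and the parent later folds those files back into the main cache with save_merge(). A minimal sketch of that flow; the ExampleCache subclass, the key and the throwaway CACHE directory are illustrative, not part of BitBake:

    import tempfile
    import bb.data
    from bb.cache import MultiProcessCache

    class ExampleCache(MultiProcessCache):
        cache_file_name = "example_cache.dat"   # both class attributes are
        CACHE_VERSION = 1                       # required by init_cache()

    d = bb.data.init()                          # stand-in datastore
    d.setVar("CACHE", tempfile.mkdtemp())       # throwaway cache directory

    cache = ExampleCache()
    cache.init_cache(d)          # loads <CACHE>/example_cache.dat if present

    # a worker records new entries in the extras area, not in cachedata
    cache.cachedata_extras[0]["some/file.bb"] = ("payload",)
    cache.save_extras(d)         # writes example_cache.dat-<pid>

    # the parent, after all workers have finished:
    cache.save_merge(d)          # merges the -<pid> files and rewrites the cache
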
diff --git a/bitbake/lib/bb/cache_extra.py b/bitbake/lib/bb/cache_extra.py new file mode 100644 index 0000000..83f4959 --- /dev/null +++ b/bitbake/lib/bb/cache_extra.py | |||
@@ -0,0 +1,75 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | # | ||
4 | # All extra RecipeInfo classes are defined in this file. Currently, | ||
5 | # only Hob (the Image Creator) requests some extra fields, so | ||
6 | # HobRecipeInfo is defined. It is named HobRecipeInfo because it | ||
7 | # was introduced by 'hob'. Users can also introduce other | ||
8 | # RecipeInfo classes or simply use those already defined. | ||
9 | # In a following patch, such newly defined extra RecipeInfo | ||
10 | # classes are dynamically loaded and used for loading/saving the | ||
11 | # extra cache fields | ||
12 | |||
13 | # Copyright (C) 2011, Intel Corporation. All rights reserved. | ||
14 | |||
15 | # This program is free software; you can redistribute it and/or modify | ||
16 | # it under the terms of the GNU General Public License version 2 as | ||
17 | # published by the Free Software Foundation. | ||
18 | # | ||
19 | # This program is distributed in the hope that it will be useful, | ||
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
22 | # GNU General Public License for more details. | ||
23 | # | ||
24 | # You should have received a copy of the GNU General Public License along | ||
25 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
26 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
27 | |||
28 | from bb.cache import RecipeInfoCommon | ||
29 | |||
30 | class HobRecipeInfo(RecipeInfoCommon): | ||
31 | __slots__ = () | ||
32 | |||
33 | classname = "HobRecipeInfo" | ||
34 | # override this member with the correct data cache file name | ||
35 | # for the class, e.g. bb_extracache_hob.dat (core uses bb_cache.dat) | ||
36 | cachefile = "bb_extracache_" + classname + ".dat" | ||
37 | |||
38 | # override this member with the list of extra cache fields | ||
39 | # that this class will provide | ||
40 | cachefields = ['summary', 'license', 'section', | ||
41 | 'description', 'homepage', 'bugtracker', | ||
42 | 'prevision', 'files_info'] | ||
43 | |||
44 | def __init__(self, filename, metadata): | ||
45 | |||
46 | self.summary = self.getvar('SUMMARY', metadata) | ||
47 | self.license = self.getvar('LICENSE', metadata) | ||
48 | self.section = self.getvar('SECTION', metadata) | ||
49 | self.description = self.getvar('DESCRIPTION', metadata) | ||
50 | self.homepage = self.getvar('HOMEPAGE', metadata) | ||
51 | self.bugtracker = self.getvar('BUGTRACKER', metadata) | ||
52 | self.prevision = self.getvar('PR', metadata) | ||
53 | self.files_info = self.getvar('FILES_INFO', metadata) | ||
54 | |||
55 | @classmethod | ||
56 | def init_cacheData(cls, cachedata): | ||
57 | # CacheData in Hob RecipeInfo Class | ||
58 | cachedata.summary = {} | ||
59 | cachedata.license = {} | ||
60 | cachedata.section = {} | ||
61 | cachedata.description = {} | ||
62 | cachedata.homepage = {} | ||
63 | cachedata.bugtracker = {} | ||
64 | cachedata.prevision = {} | ||
65 | cachedata.files_info = {} | ||
66 | |||
67 | def add_cacheData(self, cachedata, fn): | ||
68 | cachedata.summary[fn] = self.summary | ||
69 | cachedata.license[fn] = self.license | ||
70 | cachedata.section[fn] = self.section | ||
71 | cachedata.description[fn] = self.description | ||
72 | cachedata.homepage[fn] = self.homepage | ||
73 | cachedata.bugtracker[fn] = self.bugtracker | ||
74 | cachedata.prevision[fn] = self.prevision | ||
75 | cachedata.files_info[fn] = self.files_info | ||
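HobRecipeInfo only takes effect when it is included in the caches_array handed to the Cache and CacheData constructors. A sketch of that wiring; the stand-in datastore and hash below replace values a configured cooker would normally supply:

    import bb.data
    from bb.cache import Cache, CacheData, CoreRecipeInfo
    from bb.cache_extra import HobRecipeInfo

    # CoreRecipeInfo comes first; each extra class gets its own
    # bb_extracache_*.dat file keyed by the same data_hash
    caches_array = [CoreRecipeInfo, HobRecipeInfo]

    d = bb.data.init()       # stand-in for the cooker's datastore
    data_hash = "0" * 32     # stand-in for the configuration hash

    cache = Cache(d, data_hash, caches_array)   # no CACHE set: runs uncached
    cachedata = CacheData(caches_array)

    # once recipes are added, add_cacheData() fills per-file maps such as
    # cachedata.summary[fn], cachedata.license[fn] and cachedata.files_info[fn]
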
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py new file mode 100644 index 0000000..514ff0b --- /dev/null +++ b/bitbake/lib/bb/checksum.py | |||
@@ -0,0 +1,90 @@ | |||
1 | # Local file checksum cache implementation | ||
2 | # | ||
3 | # Copyright (C) 2012 Intel Corporation | ||
4 | # | ||
5 | # This program is free software; you can redistribute it and/or modify | ||
6 | # it under the terms of the GNU General Public License version 2 as | ||
7 | # published by the Free Software Foundation. | ||
8 | # | ||
9 | # This program is distributed in the hope that it will be useful, | ||
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
12 | # GNU General Public License for more details. | ||
13 | # | ||
14 | # You should have received a copy of the GNU General Public License along | ||
15 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
16 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
17 | |||
18 | import os | ||
19 | import stat | ||
20 | import bb.utils | ||
21 | import logging | ||
22 | from bb.cache import MultiProcessCache | ||
23 | |||
24 | logger = logging.getLogger("BitBake.Cache") | ||
25 | |||
26 | try: | ||
27 | import cPickle as pickle | ||
28 | except ImportError: | ||
29 | import pickle | ||
30 | logger.info("Importing cPickle failed. " | ||
31 | "Falling back to a very slow implementation.") | ||
32 | |||
33 | |||
34 | # mtime cache (non-persistent) | ||
35 | # based on the assumption that files do not change during a bitbake run | ||
36 | class FileMtimeCache(object): | ||
37 | cache = {} | ||
38 | |||
39 | def cached_mtime(self, f): | ||
40 | if f not in self.cache: | ||
41 | self.cache[f] = os.stat(f)[stat.ST_MTIME] | ||
42 | return self.cache[f] | ||
43 | |||
44 | def cached_mtime_noerror(self, f): | ||
45 | if f not in self.cache: | ||
46 | try: | ||
47 | self.cache[f] = os.stat(f)[stat.ST_MTIME] | ||
48 | except OSError: | ||
49 | return 0 | ||
50 | return self.cache[f] | ||
51 | |||
52 | def update_mtime(self, f): | ||
53 | self.cache[f] = os.stat(f)[stat.ST_MTIME] | ||
54 | return self.cache[f] | ||
55 | |||
56 | def clear(self): | ||
57 | self.cache.clear() | ||
58 | |||
59 | # Checksum + mtime cache (persistent) | ||
60 | class FileChecksumCache(MultiProcessCache): | ||
61 | cache_file_name = "local_file_checksum_cache.dat" | ||
62 | CACHE_VERSION = 1 | ||
63 | |||
64 | def __init__(self): | ||
65 | self.mtime_cache = FileMtimeCache() | ||
66 | MultiProcessCache.__init__(self) | ||
67 | |||
68 | def get_checksum(self, f): | ||
69 | entry = self.cachedata[0].get(f) | ||
70 | cmtime = self.mtime_cache.cached_mtime(f) | ||
71 | if entry: | ||
72 | (mtime, hashval) = entry | ||
73 | if cmtime == mtime: | ||
74 | return hashval | ||
75 | else: | ||
76 | bb.debug(2, "file %s changed mtime, recompute checksum" % f) | ||
77 | |||
78 | hashval = bb.utils.md5_file(f) | ||
79 | self.cachedata_extras[0][f] = (cmtime, hashval) | ||
80 | return hashval | ||
81 | |||
82 | def merge_data(self, source, dest): | ||
83 | for h in source[0]: | ||
84 | if h in dest[0]: | ||
85 | (smtime, _) = source[0][h] | ||
86 | (dmtime, _) = dest[0][h] | ||
87 | if smtime > dmtime: | ||
88 | dest[0][h] = source[0][h] | ||
89 | else: | ||
90 | dest[0][h] = source[0][h] | ||
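The checksum cache pairs the non-persistent mtime cache with a persistent hash store: a file is only re-hashed when its recorded mtime differs. A small sketch; the temporary file is illustrative, and calling init_cache() with a configured datastore would enable the on-disk cache:

    import os
    import tempfile
    from bb.checksum import FileChecksumCache

    fcc = FileChecksumCache()
    # fcc.init_cache(d)   # with a datastore: enables local_file_checksum_cache.dat

    fd, path = tempfile.mkstemp()
    os.write(fd, b"hello")
    os.close(fd)

    hashval = fcc.get_checksum(path)   # runs md5_file() and records (mtime, hash)
    # the new entry sits in cachedata_extras until save_extras()/save_merge()
    # publishes it; FileMtimeCache pins the first mtime seen during the run
    os.unlink(path)
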
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py new file mode 100644 index 0000000..8b8f91a --- /dev/null +++ b/bitbake/lib/bb/codeparser.py | |||
@@ -0,0 +1,406 @@ | |||
1 | import ast | ||
2 | import codegen | ||
3 | import logging | ||
4 | import os.path | ||
5 | import bb.utils, bb.data | ||
6 | from itertools import chain | ||
7 | from pysh import pyshyacc, pyshlex, sherrors | ||
8 | from bb.cache import MultiProcessCache | ||
9 | |||
10 | |||
11 | logger = logging.getLogger('BitBake.CodeParser') | ||
12 | |||
13 | try: | ||
14 | import cPickle as pickle | ||
15 | except ImportError: | ||
16 | import pickle | ||
17 | logger.info('Importing cPickle failed. Falling back to a very slow implementation.') | ||
18 | |||
19 | |||
20 | def check_indent(codestr): | ||
21 | """If the code is indented, add a top level piece of code to 'remove' the indentation""" | ||
22 | |||
23 | i = 0 | ||
24 | while codestr[i] in ["\n", "\t", " "]: | ||
25 | i = i + 1 | ||
26 | |||
27 | if i == 0: | ||
28 | return codestr | ||
29 | |||
30 | if codestr[i-1] == "\t" or codestr[i-1] == " ": | ||
31 | return "if 1:\n" + codestr | ||
32 | |||
33 | return codestr | ||
34 | |||
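# For example (hypothetical snippet): check_indent("    x = 1\n") returns
# "if 1:\n    x = 1\n", which compile()s cleanly, whereas the raw indented
# string would raise an IndentationError.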
35 | |||
36 | # pickle, in Python 2.7.3 at least, handles data duplication badly | ||
37 | # when pickling and unpickling. Combine this with duplicate objects and | ||
38 | # things are a mess. | ||
39 | # | ||
40 | # When the sets are originally created, python calls intern() on the set keys | ||
41 | # which significantly improves memory usage. Sadly the pickle/unpickle process | ||
42 | # doesn't call intern() on the keys and results in the same strings being duplicated | ||
43 | # in memory. This also means pickle will save the same string multiple times in | ||
44 | # the cache file. | ||
45 | # | ||
46 | # By having shell and python cacheline objects with setstate/getstate, we force | ||
47 | # the object creation through our own routine where we can call intern (via internSet). | ||
48 | # | ||
49 | # We also use hashable frozensets and ensure we use references to these so that | ||
50 | # duplicates can be removed, both in memory and in the resulting pickled data. | ||
51 | # | ||
52 | # By playing these games, the size of the cache file shrinks dramatically | ||
53 | # meaning faster load times and the reloaded cache files also consume much less | ||
54 | # memory. Smaller cache files, faster load times and lower memory usage is good. | ||
55 | # | ||
56 | # A custom getstate/setstate using tuples is actually worth 15% cachesize by | ||
57 | # avoiding duplication of the attribute names! | ||
58 | |||
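# For example, with the SetCache below, two logically equal sets collapse
# to one shared object (hypothetical keys):
#
#   cache = SetCache()
#   a = cache.internSet(["FOO", "BAR"])
#   b = cache.internSet(["BAR", "FOO"])
#   assert a is b   # one frozenset in memory, memoised once per pickle
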
59 | class SetCache(object): | ||
60 | def __init__(self): | ||
61 | self.setcache = {} | ||
62 | |||
63 | def internSet(self, items): | ||
64 | |||
65 | new = [] | ||
66 | for i in items: | ||
67 | new.append(intern(i)) | ||
68 | s = frozenset(new) | ||
69 | if hash(s) in self.setcache: | ||
70 | return self.setcache[hash(s)] | ||
71 | self.setcache[hash(s)] = s | ||
72 | return s | ||
73 | |||
74 | codecache = SetCache() | ||
75 | |||
76 | class pythonCacheLine(object): | ||
77 | def __init__(self, refs, execs, contains): | ||
78 | self.refs = codecache.internSet(refs) | ||
79 | self.execs = codecache.internSet(execs) | ||
80 | self.contains = {} | ||
81 | for c in contains: | ||
82 | self.contains[c] = codecache.internSet(contains[c]) | ||
83 | |||
84 | def __getstate__(self): | ||
85 | return (self.refs, self.execs, self.contains) | ||
86 | |||
87 | def __setstate__(self, state): | ||
88 | (refs, execs, contains) = state | ||
89 | self.__init__(refs, execs, contains) | ||
90 | def __hash__(self): | ||
91 | l = (hash(self.refs), hash(self.execs)) | ||
92 | for c in sorted(self.contains.keys()): | ||
93 | l = l + (c, hash(self.contains[c])) | ||
94 | return hash(l) | ||
95 | |||
96 | class shellCacheLine(object): | ||
97 | def __init__(self, execs): | ||
98 | self.execs = codecache.internSet(execs) | ||
99 | |||
100 | def __getstate__(self): | ||
101 | return (self.execs) | ||
102 | |||
103 | def __setstate__(self, state): | ||
104 | (execs) = state | ||
105 | self.__init__(execs) | ||
106 | def __hash__(self): | ||
107 | return hash(self.execs) | ||
108 | |||
109 | class CodeParserCache(MultiProcessCache): | ||
110 | cache_file_name = "bb_codeparser.dat" | ||
111 | CACHE_VERSION = 7 | ||
112 | |||
113 | def __init__(self): | ||
114 | MultiProcessCache.__init__(self) | ||
115 | self.pythoncache = self.cachedata[0] | ||
116 | self.shellcache = self.cachedata[1] | ||
117 | self.pythoncacheextras = self.cachedata_extras[0] | ||
118 | self.shellcacheextras = self.cachedata_extras[1] | ||
119 | |||
120 | # To avoid duplication in the codeparser cache, keep | ||
121 | # a lookup of hashes of objects we already have | ||
122 | self.pythoncachelines = {} | ||
123 | self.shellcachelines = {} | ||
124 | |||
125 | def newPythonCacheLine(self, refs, execs, contains): | ||
126 | cacheline = pythonCacheLine(refs, execs, contains) | ||
127 | h = hash(cacheline) | ||
128 | if h in self.pythoncachelines: | ||
129 | return self.pythoncachelines[h] | ||
130 | self.pythoncachelines[h] = cacheline | ||
131 | return cacheline | ||
132 | |||
133 | def newShellCacheLine(self, execs): | ||
134 | cacheline = shellCacheLine(execs) | ||
135 | h = hash(cacheline) | ||
136 | if h in self.shellcachelines: | ||
137 | return self.shellcachelines[h] | ||
138 | self.shellcachelines[h] = cacheline | ||
139 | return cacheline | ||
140 | |||
141 | def init_cache(self, d): | ||
142 | MultiProcessCache.init_cache(self, d) | ||
143 | |||
144 | # cachedata gets re-assigned in the parent | ||
145 | self.pythoncache = self.cachedata[0] | ||
146 | self.shellcache = self.cachedata[1] | ||
147 | |||
148 | def create_cachedata(self): | ||
149 | data = [{}, {}] | ||
150 | return data | ||
151 | |||
152 | codeparsercache = CodeParserCache() | ||
153 | |||
154 | def parser_cache_init(d): | ||
155 | codeparsercache.init_cache(d) | ||
156 | |||
157 | def parser_cache_save(d): | ||
158 | codeparsercache.save_extras(d) | ||
159 | |||
160 | def parser_cache_savemerge(d): | ||
161 | codeparsercache.save_merge(d) | ||
162 | |||
163 | Logger = logging.getLoggerClass() | ||
164 | class BufferedLogger(Logger): | ||
165 | def __init__(self, name, level=0, target=None): | ||
166 | Logger.__init__(self, name) | ||
167 | self.setLevel(level) | ||
168 | self.buffer = [] | ||
169 | self.target = target | ||
170 | |||
171 | def handle(self, record): | ||
172 | self.buffer.append(record) | ||
173 | |||
174 | def flush(self): | ||
175 | for record in self.buffer: | ||
176 | self.target.handle(record) | ||
177 | self.buffer = [] | ||
178 | |||
179 | class PythonParser(): | ||
180 | getvars = (".getVar", ".appendVar", ".prependVar") | ||
181 | containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any") | ||
182 | execfuncs = ("bb.build.exec_func", "bb.build.exec_task") | ||
183 | |||
184 | def warn(self, func, arg): | ||
185 | """Warn about calls of bitbake APIs which pass a non-literal | ||
186 | argument for the variable name, as we're not able to track such | ||
187 | a reference. | ||
188 | """ | ||
189 | |||
190 | try: | ||
191 | funcstr = codegen.to_source(func) | ||
192 | argstr = codegen.to_source(arg) | ||
193 | except TypeError: | ||
194 | self.log.debug(2, 'Failed to convert function and argument to source form') | ||
195 | else: | ||
196 | self.log.debug(1, self.unhandled_message % (funcstr, argstr)) | ||
197 | |||
198 | def visit_Call(self, node): | ||
199 | name = self.called_node_name(node.func) | ||
200 | if (name and name.endswith(self.getvars)) or name in self.containsfuncs: | ||
201 | if isinstance(node.args[0], ast.Str): | ||
202 | varname = node.args[0].s | ||
203 | if name in self.containsfuncs and isinstance(node.args[1], ast.Str): | ||
204 | if varname not in self.contains: | ||
205 | self.contains[varname] = set() | ||
206 | self.contains[varname].add(node.args[1].s) | ||
207 | else: | ||
208 | self.references.add(node.args[0].s) | ||
209 | else: | ||
210 | self.warn(node.func, node.args[0]) | ||
211 | elif name in self.execfuncs: | ||
212 | if isinstance(node.args[0], ast.Str): | ||
213 | self.var_execs.add(node.args[0].s) | ||
214 | else: | ||
215 | self.warn(node.func, node.args[0]) | ||
216 | elif name and isinstance(node.func, (ast.Name, ast.Attribute)): | ||
217 | self.execs.add(name) | ||
218 | |||
219 | def called_node_name(self, node): | ||
220 | """Given a called node, return its original string form""" | ||
221 | components = [] | ||
222 | while node: | ||
223 | if isinstance(node, ast.Attribute): | ||
224 | components.append(node.attr) | ||
225 | node = node.value | ||
226 | elif isinstance(node, ast.Name): | ||
227 | components.append(node.id) | ||
228 | return '.'.join(reversed(components)) | ||
229 | else: | ||
230 | break | ||
231 | |||
232 | def __init__(self, name, log): | ||
233 | self.var_execs = set() | ||
234 | self.contains = {} | ||
235 | self.execs = set() | ||
236 | self.references = set() | ||
237 | self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) | ||
238 | |||
239 | self.unhandled_message = "in call of %s, argument '%s' is not a string literal" | ||
240 | self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) | ||
241 | |||
242 | def parse_python(self, node): | ||
243 | h = hash(str(node)) | ||
244 | |||
245 | if h in codeparsercache.pythoncache: | ||
246 | self.references = set(codeparsercache.pythoncache[h].refs) | ||
247 | self.execs = set(codeparsercache.pythoncache[h].execs) | ||
248 | self.contains = {} | ||
249 | for i in codeparsercache.pythoncache[h].contains: | ||
250 | self.contains[i] = set(codeparsercache.pythoncache[h].contains[i]) | ||
251 | return | ||
252 | |||
253 | if h in codeparsercache.pythoncacheextras: | ||
254 | self.references = set(codeparsercache.pythoncacheextras[h].refs) | ||
255 | self.execs = set(codeparsercache.pythoncacheextras[h].execs) | ||
256 | self.contains = {} | ||
257 | for i in codeparsercache.pythoncacheextras[h].contains: | ||
258 | self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) | ||
259 | return | ||
260 | |||
261 | code = compile(check_indent(str(node)), "<string>", "exec", | ||
262 | ast.PyCF_ONLY_AST) | ||
263 | |||
264 | for n in ast.walk(code): | ||
265 | if n.__class__.__name__ == "Call": | ||
266 | self.visit_Call(n) | ||
267 | |||
268 | self.execs.update(self.var_execs) | ||
269 | |||
270 | codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains) | ||
271 | |||
272 | class ShellParser(): | ||
273 | def __init__(self, name, log): | ||
274 | self.funcdefs = set() | ||
275 | self.allexecs = set() | ||
276 | self.execs = set() | ||
277 | self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) | ||
278 | self.unhandled_template = "unable to handle non-literal command '%s'" | ||
279 | self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) | ||
280 | |||
281 | def parse_shell(self, value): | ||
282 | """Parse the supplied shell code in a string, returning the external | ||
283 | commands it executes. | ||
284 | """ | ||
285 | |||
286 | h = hash(str(value)) | ||
287 | |||
288 | if h in codeparsercache.shellcache: | ||
289 | self.execs = set(codeparsercache.shellcache[h].execs) | ||
290 | return self.execs | ||
291 | |||
292 | if h in codeparsercache.shellcacheextras: | ||
293 | self.execs = set(codeparsercache.shellcacheextras[h].execs) | ||
294 | return self.execs | ||
295 | |||
296 | self._parse_shell(value) | ||
297 | self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) | ||
298 | |||
299 | codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs) | ||
300 | |||
301 | return self.execs | ||
302 | |||
303 | def _parse_shell(self, value): | ||
304 | try: | ||
305 | tokens, _ = pyshyacc.parse(value, eof=True, debug=False) | ||
306 | except pyshlex.NeedMore: | ||
307 | raise sherrors.ShellSyntaxError("Unexpected EOF") | ||
308 | |||
309 | for token in tokens: | ||
310 | self.process_tokens(token) | ||
311 | |||
312 | def process_tokens(self, tokens): | ||
313 | """Process a supplied portion of the syntax tree as returned by | ||
314 | pyshyacc.parse. | ||
315 | """ | ||
316 | |||
317 | def function_definition(value): | ||
318 | self.funcdefs.add(value.name) | ||
319 | return [value.body], None | ||
320 | |||
321 | def case_clause(value): | ||
322 | # Element 0 of each item in the case is the list of patterns, and | ||
323 | # Element 1 of each item in the case is the list of commands to be | ||
324 | # executed when that pattern matches. | ||
325 | words = chain(*[item[0] for item in value.items]) | ||
326 | cmds = chain(*[item[1] for item in value.items]) | ||
327 | return cmds, words | ||
328 | |||
329 | def if_clause(value): | ||
330 | main = chain(value.cond, value.if_cmds) | ||
331 | rest = value.else_cmds | ||
332 | if isinstance(rest, tuple) and rest[0] == "elif": | ||
333 | return chain(main, if_clause(rest[1])) | ||
334 | else: | ||
335 | return chain(main, rest) | ||
336 | |||
337 | def simple_command(value): | ||
338 | return None, chain(value.words, (assign[1] for assign in value.assigns)) | ||
339 | |||
340 | token_handlers = { | ||
341 | "and_or": lambda x: ((x.left, x.right), None), | ||
342 | "async": lambda x: ([x], None), | ||
343 | "brace_group": lambda x: (x.cmds, None), | ||
344 | "for_clause": lambda x: (x.cmds, x.items), | ||
345 | "function_definition": function_definition, | ||
346 | "if_clause": lambda x: (if_clause(x), None), | ||
347 | "pipeline": lambda x: (x.commands, None), | ||
348 | "redirect_list": lambda x: ([x.cmd], None), | ||
349 | "subshell": lambda x: (x.cmds, None), | ||
350 | "while_clause": lambda x: (chain(x.condition, x.cmds), None), | ||
351 | "until_clause": lambda x: (chain(x.condition, x.cmds), None), | ||
352 | "simple_command": simple_command, | ||
353 | "case_clause": case_clause, | ||
354 | } | ||
355 | |||
356 | for token in tokens: | ||
357 | name, value = token | ||
358 | try: | ||
359 | more_tokens, words = token_handlers[name](value) | ||
360 | except KeyError: | ||
361 | raise NotImplementedError("Unsupported token type " + name) | ||
362 | |||
363 | if more_tokens: | ||
364 | self.process_tokens(more_tokens) | ||
365 | |||
366 | if words: | ||
367 | self.process_words(words) | ||
368 | |||
369 | def process_words(self, words): | ||
370 | """Process a set of 'words' in pyshyacc parlance, which includes | ||
371 | extraction of executed commands from $() blocks, as well as grabbing | ||
372 | the command name argument. | ||
373 | """ | ||
374 | |||
375 | words = list(words) | ||
376 | for word in list(words): | ||
377 | wtree = pyshlex.make_wordtree(word[1]) | ||
378 | for part in wtree: | ||
379 | if not isinstance(part, list): | ||
380 | continue | ||
381 | |||
382 | if part[0] in ('`', '$('): | ||
383 | command = pyshlex.wordtree_as_string(part[1:-1]) | ||
384 | self._parse_shell(command) | ||
385 | |||
386 | if word[0] in ("cmd_name", "cmd_word"): | ||
387 | if word in words: | ||
388 | words.remove(word) | ||
389 | |||
390 | usetoken = False | ||
391 | for word in words: | ||
392 | if word[0] in ("cmd_name", "cmd_word") or \ | ||
393 | (usetoken and word[0] == "TOKEN"): | ||
394 | if "=" in word[1]: | ||
395 | usetoken = True | ||
396 | continue | ||
397 | |||
398 | cmd = word[1] | ||
399 | if cmd.startswith("$"): | ||
400 | self.log.debug(1, self.unhandled_template % cmd) | ||
401 | elif cmd == "eval": | ||
402 | command = " ".join(word for _, word in words[1:]) | ||
403 | self._parse_shell(command) | ||
404 | else: | ||
405 | self.allexecs.add(cmd) | ||
406 | break | ||
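A sketch of both parsers on toy inputs, showing the reference and exec sets that feed the cache lines above; the function names and target logger are illustrative:

    import logging
    from bb.codeparser import PythonParser, ShellParser

    log = logging.getLogger("BitBake.Example")

    pp = PythonParser("do_example", log)
    pp.parse_python('d.getVar("FOO", True)\nbb.build.exec_func("do_x", d)')
    # pp.references -> set(["FOO"]); pp.execs includes "do_x" via var_execs

    sp = ShellParser("do_example", log)
    cmds = sp.parse_shell('bbnote() { echo "$*"; }\nbbnote hi; tar xf a.tar')
    # locally defined functions are filtered out: cmds -> set(["echo", "tar"])
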
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py new file mode 100644 index 0000000..60f9ac0 --- /dev/null +++ b/bitbake/lib/bb/command.py | |||
@@ -0,0 +1,451 @@ | |||
1 | """ | ||
2 | BitBake 'Command' module | ||
3 | |||
4 | Provide an interface to interact with the bitbake server through 'commands' | ||
5 | """ | ||
6 | |||
7 | # Copyright (C) 2006-2007 Richard Purdie | ||
8 | # | ||
9 | # This program is free software; you can redistribute it and/or modify | ||
10 | # it under the terms of the GNU General Public License version 2 as | ||
11 | # published by the Free Software Foundation. | ||
12 | # | ||
13 | # This program is distributed in the hope that it will be useful, | ||
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
16 | # GNU General Public License for more details. | ||
17 | # | ||
18 | # You should have received a copy of the GNU General Public License along | ||
19 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
20 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
21 | |||
22 | """ | ||
23 | The bitbake server takes 'commands' from its UI/commandline. | ||
24 | Commands are either synchronous or asynchronous. | ||
25 | Async commands return data to the client in the form of events. | ||
26 | Sync commands must only return data through the function return value | ||
27 | and must not trigger events, directly or indirectly. | ||
28 | Commands are queued in a CommandQueue | ||
29 | """ | ||
30 | |||
31 | import bb.event | ||
32 | import bb.cooker | ||
33 | |||
34 | class CommandCompleted(bb.event.Event): | ||
35 | pass | ||
36 | |||
37 | class CommandExit(bb.event.Event): | ||
38 | def __init__(self, exitcode): | ||
39 | bb.event.Event.__init__(self) | ||
40 | self.exitcode = int(exitcode) | ||
41 | |||
42 | class CommandFailed(CommandExit): | ||
43 | def __init__(self, message): | ||
44 | self.error = message | ||
45 | CommandExit.__init__(self, 1) | ||
46 | |||
47 | class CommandError(Exception): | ||
48 | pass | ||
49 | |||
50 | class Command: | ||
51 | """ | ||
52 | A queue of asynchronous commands for bitbake | ||
53 | """ | ||
54 | def __init__(self, cooker): | ||
55 | self.cooker = cooker | ||
56 | self.cmds_sync = CommandsSync() | ||
57 | self.cmds_async = CommandsAsync() | ||
58 | |||
59 | # FIXME Add lock for this | ||
60 | self.currentAsyncCommand = None | ||
61 | |||
62 | def runCommand(self, commandline, ro_only = False): | ||
63 | command = commandline.pop(0) | ||
64 | if hasattr(CommandsSync, command): | ||
65 | # Can run synchronous commands straight away | ||
66 | command_method = getattr(self.cmds_sync, command) | ||
67 | if ro_only: | ||
68 | if not getattr(command_method, 'readonly', False): | ||
69 | return None, "Not able to execute non-readonly commands in readonly mode" | ||
70 | try: | ||
71 | result = command_method(self, commandline) | ||
72 | except CommandError as exc: | ||
73 | return None, exc.args[0] | ||
74 | except Exception: | ||
75 | import traceback | ||
76 | return None, traceback.format_exc() | ||
77 | else: | ||
78 | return result, None | ||
79 | if self.currentAsyncCommand is not None: | ||
80 | return None, "Busy (%s in progress)" % self.currentAsyncCommand[0] | ||
81 | if command not in CommandsAsync.__dict__: | ||
82 | return None, "No such command" | ||
83 | self.currentAsyncCommand = (command, commandline) | ||
84 | self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker) | ||
85 | return True, None | ||
86 | |||
87 | def runAsyncCommand(self): | ||
88 | try: | ||
89 | if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown): | ||
90 | # updateCache will trigger a shutdown of the parser | ||
91 | # and then raise BBHandledException triggering an exit | ||
92 | self.cooker.updateCache() | ||
93 | return False | ||
94 | if self.currentAsyncCommand is not None: | ||
95 | (command, options) = self.currentAsyncCommand | ||
96 | commandmethod = getattr(CommandsAsync, command) | ||
97 | needcache = getattr( commandmethod, "needcache" ) | ||
98 | if needcache and self.cooker.state != bb.cooker.state.running: | ||
99 | self.cooker.updateCache() | ||
100 | return True | ||
101 | else: | ||
102 | commandmethod(self.cmds_async, self, options) | ||
103 | return False | ||
104 | else: | ||
105 | return False | ||
106 | except KeyboardInterrupt as exc: | ||
107 | self.finishAsyncCommand("Interrupted") | ||
108 | return False | ||
109 | except SystemExit as exc: | ||
110 | arg = exc.args[0] | ||
111 | if isinstance(arg, basestring): | ||
112 | self.finishAsyncCommand(arg) | ||
113 | else: | ||
114 | self.finishAsyncCommand("Exited with %s" % arg) | ||
115 | return False | ||
116 | except Exception as exc: | ||
117 | import traceback | ||
118 | if isinstance(exc, bb.BBHandledException): | ||
119 | self.finishAsyncCommand("") | ||
120 | else: | ||
121 | self.finishAsyncCommand(traceback.format_exc()) | ||
122 | return False | ||
123 | |||
124 | def finishAsyncCommand(self, msg=None, code=None): | ||
125 | if msg or msg == "": | ||
126 | bb.event.fire(CommandFailed(msg), self.cooker.event_data) | ||
127 | elif code: | ||
128 | bb.event.fire(CommandExit(code), self.cooker.event_data) | ||
129 | else: | ||
130 | bb.event.fire(CommandCompleted(), self.cooker.event_data) | ||
131 | self.currentAsyncCommand = None | ||
132 | self.cooker.finishcommand() | ||
133 | |||
134 | class CommandsSync: | ||
135 | """ | ||
136 | A class of synchronous commands | ||
137 | These should run quickly so as not to hurt interactive performance. | ||
138 | These must not influence any running asynchronous command. | ||
139 | """ | ||
140 | |||
141 | def stateShutdown(self, command, params): | ||
142 | """ | ||
143 | Trigger cooker 'shutdown' mode | ||
144 | """ | ||
145 | command.cooker.shutdown(False) | ||
146 | |||
147 | def stateForceShutdown(self, command, params): | ||
148 | """ | ||
149 | Stop the cooker | ||
150 | """ | ||
151 | command.cooker.shutdown(True) | ||
152 | |||
153 | def getAllKeysWithFlags(self, command, params): | ||
154 | """ | ||
155 | Returns a dump of the global state. Call with | ||
156 | variable flags to be retrieved as params. | ||
157 | """ | ||
158 | flaglist = params[0] | ||
159 | return command.cooker.getAllKeysWithFlags(flaglist) | ||
160 | getAllKeysWithFlags.readonly = True | ||
161 | |||
162 | def getVariable(self, command, params): | ||
163 | """ | ||
164 | Read the value of a variable from data | ||
165 | """ | ||
166 | varname = params[0] | ||
167 | expand = True | ||
168 | if len(params) > 1: | ||
169 | expand = (params[1] == "True") | ||
170 | |||
171 | return command.cooker.data.getVar(varname, expand) | ||
172 | getVariable.readonly = True | ||
173 | |||
174 | def setVariable(self, command, params): | ||
175 | """ | ||
176 | Set the value of variable in data | ||
177 | """ | ||
178 | varname = params[0] | ||
179 | value = str(params[1]) | ||
180 | command.cooker.data.setVar(varname, value) | ||
181 | |||
182 | def setConfig(self, command, params): | ||
183 | """ | ||
184 | Set the value of variable in configuration | ||
185 | """ | ||
186 | varname = params[0] | ||
187 | value = str(params[1]) | ||
188 | setattr(command.cooker.configuration, varname, value) | ||
189 | |||
190 | def enableDataTracking(self, command, params): | ||
191 | """ | ||
192 | Enable history tracking for variables | ||
193 | """ | ||
194 | command.cooker.enableDataTracking() | ||
195 | |||
196 | def disableDataTracking(self, command, params): | ||
197 | """ | ||
198 | Disable history tracking for variables | ||
199 | """ | ||
200 | command.cooker.disableDataTracking() | ||
201 | |||
202 | def setPrePostConfFiles(self, command, params): | ||
203 | prefiles = params[0].split() | ||
204 | postfiles = params[1].split() | ||
205 | command.cooker.configuration.prefile = prefiles | ||
206 | command.cooker.configuration.postfile = postfiles | ||
207 | |||
208 | def getCpuCount(self, command, params): | ||
209 | """ | ||
210 | Get the CPU count on the bitbake server | ||
211 | """ | ||
212 | return bb.utils.cpu_count() | ||
213 | getCpuCount.readonly = True | ||
214 | |||
215 | def matchFile(self, command, params): | ||
216 | fMatch = params[0] | ||
217 | return command.cooker.matchFile(fMatch) | ||
218 | |||
219 | def generateNewImage(self, command, params): | ||
220 | image = params[0] | ||
221 | base_image = params[1] | ||
222 | package_queue = params[2] | ||
223 | timestamp = params[3] | ||
224 | description = params[4] | ||
225 | return command.cooker.generateNewImage(image, base_image, | ||
226 | package_queue, timestamp, description) | ||
227 | |||
228 | def ensureDir(self, command, params): | ||
229 | directory = params[0] | ||
230 | bb.utils.mkdirhier(directory) | ||
231 | |||
232 | def setVarFile(self, command, params): | ||
233 | """ | ||
234 | Save a variable in a file; used for saving in a configuration file | ||
235 | """ | ||
236 | var = params[0] | ||
237 | val = params[1] | ||
238 | default_file = params[2] | ||
239 | op = params[3] | ||
240 | command.cooker.modifyConfigurationVar(var, val, default_file, op) | ||
241 | |||
242 | def removeVarFile(self, command, params): | ||
243 | """ | ||
244 | Remove a variable declaration from a file | ||
245 | """ | ||
246 | var = params[0] | ||
247 | command.cooker.removeConfigurationVar(var) | ||
248 | |||
249 | def createConfigFile(self, command, params): | ||
250 | """ | ||
251 | Create an extra configuration file | ||
252 | """ | ||
253 | name = params[0] | ||
254 | command.cooker.createConfigFile(name) | ||
255 | |||
256 | def setEventMask(self, command, params): | ||
257 | handlerNum = params[0] | ||
258 | llevel = params[1] | ||
259 | debug_domains = params[2] | ||
260 | mask = params[3] | ||
261 | return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask) | ||
262 | |||
263 | def setFeatures(self, command, params): | ||
264 | """ | ||
265 | Set the cooker features to include the passed list of features | ||
266 | """ | ||
267 | features = params[0] | ||
268 | command.cooker.setFeatures(features) | ||
269 | |||
270 | # although we change the internal state of the cooker, this is transparent since | ||
271 | # we always take and leave the cooker in state.initial | ||
272 | setFeatures.readonly = True | ||
273 | |||
274 | def updateConfig(self, command, params): | ||
275 | options = params[0] | ||
276 | command.cooker.updateConfigOpts(options) | ||
277 | |||
278 | class CommandsAsync: | ||
279 | """ | ||
280 | A class of asynchronous commands | ||
281 | These functions communicate via generated events. | ||
282 | Any function that requires metadata parsing should be here. | ||
283 | """ | ||
284 | |||
285 | def buildFile(self, command, params): | ||
286 | """ | ||
287 | Build a single specified .bb file | ||
288 | """ | ||
289 | bfile = params[0] | ||
290 | task = params[1] | ||
291 | |||
292 | command.cooker.buildFile(bfile, task) | ||
293 | buildFile.needcache = False | ||
294 | |||
295 | def buildTargets(self, command, params): | ||
296 | """ | ||
297 | Build a set of targets | ||
298 | """ | ||
299 | pkgs_to_build = params[0] | ||
300 | task = params[1] | ||
301 | |||
302 | command.cooker.buildTargets(pkgs_to_build, task) | ||
303 | buildTargets.needcache = True | ||
304 | |||
305 | def generateDepTreeEvent(self, command, params): | ||
306 | """ | ||
307 | Generate an event containing the dependency information | ||
308 | """ | ||
309 | pkgs_to_build = params[0] | ||
310 | task = params[1] | ||
311 | |||
312 | command.cooker.generateDepTreeEvent(pkgs_to_build, task) | ||
313 | command.finishAsyncCommand() | ||
314 | generateDepTreeEvent.needcache = True | ||
315 | |||
316 | def generateDotGraph(self, command, params): | ||
317 | """ | ||
318 | Dump dependency information to disk as .dot files | ||
319 | """ | ||
320 | pkgs_to_build = params[0] | ||
321 | task = params[1] | ||
322 | |||
323 | command.cooker.generateDotGraphFiles(pkgs_to_build, task) | ||
324 | command.finishAsyncCommand() | ||
325 | generateDotGraph.needcache = True | ||
326 | |||
327 | def generateTargetsTree(self, command, params): | ||
328 | """ | ||
329 | Generate a tree of buildable targets. | ||
330 | If klass is provided ensure all recipes that inherit the class are | ||
331 | included in the package list. | ||
332 | If pkg_list provided use that list (plus any extras brought in by | ||
333 | klass) rather than generating a tree for all packages. | ||
334 | """ | ||
335 | klass = params[0] | ||
336 | pkg_list = params[1] | ||
337 | |||
338 | command.cooker.generateTargetsTree(klass, pkg_list) | ||
339 | command.finishAsyncCommand() | ||
340 | generateTargetsTree.needcache = True | ||
341 | |||
342 | def findCoreBaseFiles(self, command, params): | ||
343 | """ | ||
344 | Find certain files in the COREBASE directory, e.g. layers | ||
345 | """ | ||
346 | subdir = params[0] | ||
347 | filename = params[1] | ||
348 | |||
349 | command.cooker.findCoreBaseFiles(subdir, filename) | ||
350 | command.finishAsyncCommand() | ||
351 | findCoreBaseFiles.needcache = False | ||
352 | |||
353 | def findConfigFiles(self, command, params): | ||
354 | """ | ||
355 | Find config files which provide appropriate values | ||
356 | for the passed configuration variable, e.g. MACHINE | ||
357 | """ | ||
358 | varname = params[0] | ||
359 | |||
360 | command.cooker.findConfigFiles(varname) | ||
361 | command.finishAsyncCommand() | ||
362 | findConfigFiles.needcache = False | ||
363 | |||
364 | def findFilesMatchingInDir(self, command, params): | ||
365 | """ | ||
366 | Find implementation files matching the specified pattern | ||
368 | in the requested subdirectory of BBPATH | ||
368 | """ | ||
369 | pattern = params[0] | ||
370 | directory = params[1] | ||
371 | |||
372 | command.cooker.findFilesMatchingInDir(pattern, directory) | ||
373 | command.finishAsyncCommand() | ||
374 | findFilesMatchingInDir.needcache = False | ||
375 | |||
376 | def findConfigFilePath(self, command, params): | ||
377 | """ | ||
378 | Find the path of the requested configuration file | ||
379 | """ | ||
380 | configfile = params[0] | ||
381 | |||
382 | command.cooker.findConfigFilePath(configfile) | ||
383 | command.finishAsyncCommand() | ||
384 | findConfigFilePath.needcache = False | ||
385 | |||
386 | def showVersions(self, command, params): | ||
387 | """ | ||
388 | Show the currently selected versions | ||
389 | """ | ||
390 | command.cooker.showVersions() | ||
391 | command.finishAsyncCommand() | ||
392 | showVersions.needcache = True | ||
393 | |||
394 | def showEnvironmentTarget(self, command, params): | ||
395 | """ | ||
396 | Print the environment of a target recipe | ||
397 | (needs the cache to work out which recipe to use) | ||
398 | """ | ||
399 | pkg = params[0] | ||
400 | |||
401 | command.cooker.showEnvironment(None, pkg) | ||
402 | command.finishAsyncCommand() | ||
403 | showEnvironmentTarget.needcache = True | ||
404 | |||
405 | def showEnvironment(self, command, params): | ||
406 | """ | ||
407 | Print the standard environment | ||
408 | or, if a buildfile is specified, the environment for that recipe | ||
409 | """ | ||
410 | bfile = params[0] | ||
411 | |||
412 | command.cooker.showEnvironment(bfile) | ||
413 | command.finishAsyncCommand() | ||
414 | showEnvironment.needcache = False | ||
415 | |||
416 | def parseFiles(self, command, params): | ||
417 | """ | ||
418 | Parse the .bb files | ||
419 | """ | ||
420 | command.cooker.updateCache() | ||
421 | command.finishAsyncCommand() | ||
422 | parseFiles.needcache = True | ||
423 | |||
424 | def compareRevisions(self, command, params): | ||
425 | """ | ||
426 | Compare the source revisions known to the fetcher with the cached ones | ||
427 | """ | ||
428 | if bb.fetch.fetcher_compare_revisions(command.cooker.data): | ||
429 | command.finishAsyncCommand(code=1) | ||
430 | else: | ||
431 | command.finishAsyncCommand() | ||
432 | compareRevisions.needcache = True | ||
433 | |||
434 | def triggerEvent(self, command, params): | ||
435 | """ | ||
436 | Trigger a certain event | ||
437 | """ | ||
438 | event = params[0] | ||
439 | bb.event.fire(eval(event), command.cooker.data) | ||
440 | command.currentAsyncCommand = None | ||
441 | triggerEvent.needcache = False | ||
442 | |||
443 | def resetCooker(self, command, params): | ||
444 | """ | ||
445 | Reset the cooker to its initial state, thus forcing a reparse for | ||
446 | any async command that has the needcache property set to True | ||
447 | """ | ||
448 | command.cooker.reset() | ||
449 | command.finishAsyncCommand() | ||
450 | resetCooker.needcache = False | ||
451 | |||
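Every handler in the CommandsAsync table above follows one contract: it receives the Command object plus a params list, delegates to the cooker, and then finishes (or hands off) the async command; the needcache attribute tells the server whether the recipe cache must be populated before the handler may run. Below is a minimal dispatcher sketch of that contract, assuming the Command object exposes its CommandsAsync instance as cmds_async and that updateCache() is how a parse is forced (illustrative only, not the upstream dispatcher):

    def run_async(command, name, params):
        # look the handler up on the CommandsAsync instance by name
        func = getattr(command.cmds_async, name, None)
        if func is None:
            return "No such async command: %s" % name
        if getattr(func, "needcache", False):
            # needcache=True handlers require a populated recipe cache
            command.cooker.updateCache()
        func(command, params)

    # e.g. run_async(cooker.command, "triggerEvent", ["bb.event.ConfigParsed()"])

Note that triggerEvent eval()s its parameter, so the string passed must be a Python expression that constructs the event object.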
diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py new file mode 100644 index 0000000..de1923d --- /dev/null +++ b/bitbake/lib/bb/compat.py | |||
@@ -0,0 +1,6 @@ | |||
1 | """Code pulled from future python versions, here for compatibility""" | ||
2 | |||
3 | from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict | ||
4 | from functools import total_ordering | ||
5 | |||
6 | |||
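bb/compat.py is purely a re-export shim: other modules import these names from bb.compat so that call sites stay stable while the stdlib location of the names shifts across Python versions. An illustrative (not upstream) use of two of the re-exported names:

    from bb import compat

    @compat.total_ordering
    class Version(object):
        # __eq__ and __lt__ suffice; total_ordering derives __le__, __gt__, __ge__
        def __init__(self, n):
            self.n = n
        def __eq__(self, other):
            return self.n == other.n
        def __lt__(self, other):
            return self.n < other.n

    releases = compat.OrderedDict()  # remembers insertion order
    releases["1.0"] = Version(1)
    releases["2.0"] = Version(2)
    assert Version(1) <= Version(2)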
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py new file mode 100644 index 0000000..879d2ba --- /dev/null +++ b/bitbake/lib/bb/cooker.py | |||
@@ -0,0 +1,2025 @@ | |||
1 | #!/usr/bin/env python | ||
2 | # ex:ts=4:sw=4:sts=4:et | ||
3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
4 | # | ||
5 | # Copyright (C) 2003, 2004 Chris Larson | ||
6 | # Copyright (C) 2003, 2004 Phil Blundell | ||
7 | # Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer | ||
8 | # Copyright (C) 2005 Holger Hans Peter Freyther | ||
9 | # Copyright (C) 2005 ROAD GmbH | ||
10 | # Copyright (C) 2006 - 2007 Richard Purdie | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | |||
25 | from __future__ import print_function | ||
26 | import sys, os, glob, os.path, re, time | ||
27 | import atexit | ||
28 | import itertools | ||
29 | import logging | ||
30 | import multiprocessing | ||
31 | import sre_constants | ||
32 | import threading | ||
33 | from cStringIO import StringIO | ||
34 | from contextlib import closing | ||
35 | from functools import wraps | ||
36 | from collections import defaultdict | ||
37 | import bb, bb.exceptions, bb.command | ||
38 | from bb import utils, data, parse, event, cache, providers, taskdata, runqueue | ||
39 | import Queue | ||
40 | import signal | ||
41 | import prserv.serv | ||
42 | import pyinotify | ||
43 | |||
44 | logger = logging.getLogger("BitBake") | ||
45 | collectlog = logging.getLogger("BitBake.Collection") | ||
46 | buildlog = logging.getLogger("BitBake.Build") | ||
47 | parselog = logging.getLogger("BitBake.Parsing") | ||
48 | providerlog = logging.getLogger("BitBake.Provider") | ||
49 | |||
50 | class NoSpecificMatch(bb.BBHandledException): | ||
51 | """ | ||
52 | Exception raised when zero or multiple matching files are found | ||
53 | """ | ||
54 | |||
55 | class NothingToBuild(Exception): | ||
56 | """ | ||
57 | Exception raised when there is nothing to build | ||
58 | """ | ||
59 | |||
60 | class CollectionError(bb.BBHandledException): | ||
61 | """ | ||
62 | Exception raised when layer configuration is incorrect | ||
63 | """ | ||
64 | |||
65 | class state: | ||
66 | initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7) | ||
67 | |||
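The state class is a plain integer enumeration (this is Python 2 code, predating the enum module). A hypothetical helper, useful for log messages, that maps a state id back to its name:

    _state_names = dict((num, name)
                        for name, num in vars(state).items()
                        if isinstance(num, int))
    # _state_names[state.forceshutdown] == "forceshutdown"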
68 | |||
69 | class SkippedPackage: | ||
70 | def __init__(self, info = None, reason = None): | ||
71 | self.pn = None | ||
72 | self.skipreason = None | ||
73 | self.provides = None | ||
74 | self.rprovides = None | ||
75 | |||
76 | if info: | ||
77 | self.pn = info.pn | ||
78 | self.skipreason = info.skipreason | ||
79 | self.provides = info.provides | ||
80 | self.rprovides = info.rprovides | ||
81 | elif reason: | ||
82 | self.skipreason = reason | ||
83 | |||
84 | |||
85 | class CookerFeatures(object): | ||
86 | _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4) | ||
87 | |||
88 | def __init__(self): | ||
89 | self._features = set() | ||
90 | |||
91 | def setFeature(self, f): | ||
92 | # validate we got a request for a feature we support | ||
93 | if f not in CookerFeatures._feature_list: | ||
94 | return | ||
95 | self._features.add(f) | ||
96 | |||
97 | def __contains__(self, f): | ||
98 | return f in self._features | ||
99 | |||
100 | def __iter__(self): | ||
101 | return self._features.__iter__() | ||
102 | |||
103 | def next(self): | ||
104 | return self._features.next() | ||
105 | |||
106 | |||
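CookerFeatures acts as a validated set of integer feature ids: setFeature() silently drops anything outside _feature_list, while membership tests and iteration go through the normal protocols. A short usage sketch (the 999 value is made up to show the validation):

    features = CookerFeatures()
    features.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
    features.setFeature(999)  # unknown id: silently ignored
    assert CookerFeatures.SEND_DEPENDS_TREE in features
    assert list(features) == [CookerFeatures.SEND_DEPENDS_TREE]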
107 | #============================================================================# | ||
108 | # BBCooker | ||
109 | #============================================================================# | ||
110 | class BBCooker: | ||
111 | """ | ||
112 | Manages one bitbake build run | ||
113 | """ | ||
114 | |||
115 | def __init__(self, configuration, featureSet = []): | ||
116 | self.recipecache = None | ||
117 | self.skiplist = {} | ||
118 | self.featureset = CookerFeatures() | ||
119 | for f in featureSet: | ||
120 | self.featureset.setFeature(f) | ||
121 | |||
122 | self.configuration = configuration | ||
123 | |||
124 | self.configwatcher = pyinotify.WatchManager() | ||
125 | self.configwatcher.bbseen = [] | ||
126 | self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications) | ||
127 | self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \ | ||
128 | pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \ | ||
129 | pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO | ||
130 | self.watcher = pyinotify.WatchManager() | ||
131 | self.watcher.bbseen = [] | ||
132 | self.notifier = pyinotify.Notifier(self.watcher, self.notifications) | ||
133 | |||
134 | |||
135 | self.initConfigurationData() | ||
136 | |||
137 | self.inotify_modified_files = [] | ||
138 | |||
139 | def _process_inotify_updates(server, notifier_list, abort): | ||
140 | for n in notifier_list: | ||
141 | if n.check_events(timeout=0): | ||
142 | # read the notified events and enqueue them | ||
143 | n.read_events() | ||
144 | n.process_events() | ||
145 | return 1.0 | ||
146 | |||
147 | self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier]) | ||
148 | |||
149 | self.baseconfig_valid = True | ||
150 | self.parsecache_valid = False | ||
151 | |||
152 | # Take a lock so only one copy of bitbake can run against a given build | ||
153 | # directory at a time | ||
154 | lockfile = self.data.expand("${TOPDIR}/bitbake.lock") | ||
155 | self.lock = bb.utils.lockfile(lockfile, False, False) | ||
156 | if not self.lock: | ||
157 | bb.fatal("Only one copy of bitbake should be run against a build directory") | ||
158 | try: | ||
159 | self.lock.seek(0) | ||
160 | self.lock.truncate() | ||
161 | if len(configuration.interface) >= 2: | ||
162 | self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1])) | ||
163 | self.lock.flush() | ||
164 | except: | ||
165 | pass | ||
166 | |||
167 | # TOSTOP must not be set or our children will hang when they output | ||
168 | fd = sys.stdout.fileno() | ||
169 | if os.isatty(fd): | ||
170 | import termios | ||
171 | tcattr = termios.tcgetattr(fd) | ||
172 | if tcattr[3] & termios.TOSTOP: | ||
173 | buildlog.info("The terminal had the TOSTOP bit set, clearing...") | ||
174 | tcattr[3] = tcattr[3] & ~termios.TOSTOP | ||
175 | termios.tcsetattr(fd, termios.TCSANOW, tcattr) | ||
176 | |||
177 | self.command = bb.command.Command(self) | ||
178 | self.state = state.initial | ||
179 | |||
180 | self.parser = None | ||
181 | |||
182 | signal.signal(signal.SIGTERM, self.sigterm_exception) | ||
183 | # Let SIGHUP exit as SIGTERM | ||
184 | signal.signal(signal.SIGHUP, self.sigterm_exception) | ||
185 | |||
186 | def config_notifications(self, event): | ||
187 | if not event.path in self.inotify_modified_files: | ||
188 | self.inotify_modified_files.append(event.path) | ||
189 | self.baseconfig_valid = False | ||
190 | |||
191 | def notifications(self, event): | ||
192 | if not event.path in self.inotify_modified_files: | ||
193 | self.inotify_modified_files.append(event.path) | ||
194 | self.parsecache_valid = False | ||
195 | |||
196 | def add_filewatch(self, deps, watcher=None): | ||
197 | if not watcher: | ||
198 | watcher = self.watcher | ||
199 | for i in deps: | ||
200 | f = i[0] | ||
201 | if f in watcher.bbseen: | ||
202 | continue | ||
203 | watcher.bbseen.append(f) | ||
204 | while True: | ||
205 | # We try to add watches for files that don't exist, but which would influence | ||
206 | # the parser if they did. The parent directory of such a file may not exist, in | ||
207 | # which case we walk up and watch the nearest parent that does exist for changes. | ||
208 | try: | ||
209 | watcher.add_watch(f, self.watchmask, quiet=False) | ||
210 | break | ||
211 | except pyinotify.WatchManagerError as e: | ||
212 | if 'ENOENT' in str(e): | ||
213 | f = os.path.dirname(f) | ||
214 | watcher.bbseen.append(f) | ||
215 | continue | ||
216 | raise | ||
217 | |||
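This retry loop is what lets BitBake watch files that do not exist yet: on ENOENT the watch is retried on the parent directory, so creating the file later still produces an inotify event that invalidates the relevant cache. A sketch of a call site, with a made-up path (real callers pass in the (file, mtime) dependency tuples collected during parsing):

    # conf/auto.conf may not exist yet; the watch then lands on conf/, or on
    # the nearest existing ancestor, so creating the file is still noticed
    cooker.add_filewatch([("conf/auto.conf", None)], cooker.configwatcher)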
218 | def sigterm_exception(self, signum, stackframe): | ||
219 | if signum == signal.SIGTERM: | ||
220 | bb.warn("Cooker received SIGTERM, shutting down...") | ||
221 | elif signum == signal.SIGHUP: | ||
222 | bb.warn("Cooker received SIGHUP, shutting down...") | ||
223 | self.state = state.forceshutdown | ||
224 | |||
225 | def setFeatures(self, features): | ||
226 | # we only accept a new feature set if we're in state initial, so we can reset without problems | ||
227 | if self.state != state.initial: | ||
228 | raise Exception("Illegal state for feature set change") | ||
229 | original_featureset = list(self.featureset) | ||
230 | for feature in features: | ||
231 | self.featureset.setFeature(feature) | ||
232 | bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) | ||
233 | if (original_featureset != list(self.featureset)): | ||
234 | self.reset() | ||
235 | |||
236 | def initConfigurationData(self): | ||
237 | |||
238 | self.state = state.initial | ||
239 | self.caches_array = [] | ||
240 | |||
241 | if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: | ||
242 | self.enableDataTracking() | ||
243 | |||
244 | all_extra_cache_names = [] | ||
245 | # We hardcode all known cache types in a single place, here. | ||
246 | if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: | ||
247 | all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") | ||
248 | |||
249 | caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names | ||
250 | |||
251 | # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! | ||
252 | # This is the entry point, no further check needed! | ||
253 | for var in caches_name_array: | ||
254 | try: | ||
255 | module_name, cache_name = var.split(':') | ||
256 | module = __import__(module_name, fromlist=(cache_name,)) | ||
257 | self.caches_array.append(getattr(module, cache_name)) | ||
258 | except ImportError as exc: | ||
259 | logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc)) | ||
260 | sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name) | ||
261 | |||
262 | self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) | ||
263 | self.databuilder.parseBaseConfiguration() | ||
264 | self.data = self.databuilder.data | ||
265 | self.data_hash = self.databuilder.data_hash | ||
266 | |||
267 | |||
268 | # we log all events to a file if so directed | ||
269 | if self.configuration.writeeventlog: | ||
270 | import json, pickle | ||
271 | DEFAULT_EVENTFILE = self.configuration.writeeventlog | ||
272 | class EventLogWriteHandler(): | ||
273 | |||
274 | class EventWriter(): | ||
275 | def __init__(self, cooker): | ||
276 | self.file_inited = None | ||
277 | self.cooker = cooker | ||
278 | self.event_queue = [] | ||
279 | |||
280 | def init_file(self): | ||
281 | try: | ||
282 | # delete the old log | ||
283 | os.remove(DEFAULT_EVENTFILE) | ||
284 | except OSError: | ||
285 | pass | ||
286 | |||
287 | # write current configuration data | ||
288 | with open(DEFAULT_EVENTFILE, "w") as f: | ||
289 | f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])})) | ||
290 | |||
291 | def write_event(self, event): | ||
292 | with open(DEFAULT_EVENTFILE, "a") as f: | ||
293 | try: | ||
294 | f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) })) | ||
295 | except Exception as e: | ||
296 | import traceback | ||
297 | print(e, traceback.format_exc()) | ||
298 | |||
299 | |||
300 | def send(self, event): | ||
301 | event_class = event.__module__ + "." + event.__class__.__name__ | ||
302 | |||
303 | # init on bb.event.BuildStarted | ||
304 | if self.file_inited is None: | ||
305 | if event_class == "bb.event.BuildStarted": | ||
306 | self.init_file() | ||
307 | self.file_inited = True | ||
308 | |||
309 | # write pending events | ||
310 | for e in self.event_queue: | ||
311 | self.write_event(e) | ||
312 | |||
313 | # also write the current event | ||
314 | self.write_event(event) | ||
315 | |||
316 | else: | ||
317 | # queue all events until the file is inited | ||
318 | self.event_queue.append(event) | ||
319 | |||
320 | else: | ||
321 | # we have the file, just write the event | ||
322 | self.write_event(event) | ||
323 | |||
324 | # set our handler's event processor | ||
325 | event = EventWriter(self) # self is the cooker here | ||
326 | |||
327 | |||
328 | # set up cooker features for this mock UI handler | ||
329 | |||
330 | # we need to write the dependency tree in the log | ||
331 | self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE) | ||
332 | # register the log file writer as UI Handler | ||
333 | bb.event.register_UIHhandler(EventLogWriteHandler()) | ||
334 | |||
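The writer above produces a JSON-lines file: the first record dumps all configuration variables, and every later record carries the event class name plus a JSON-wrapped pickle of the event object. A hedged sketch of reading such a log back, mirroring the double encode in write_event() (the filename is illustrative):

    from __future__ import print_function
    import json, pickle

    with open("eventlog.json") as f:
        header = json.loads(f.readline())  # {"allvariables": {...}}
        for line in f:
            entry = json.loads(line)
            # entry["vars"] is a JSON string wrapping the pickled event
            event = pickle.loads(json.loads(entry["vars"]).encode("utf-8"))
            print(entry["class"], event)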
335 | |||
336 | # | ||