summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--bitbake/lib/bb/COW.py323
-rw-r--r--bitbake/lib/bb/__init__.py143
-rw-r--r--bitbake/lib/bb/build.py709
-rw-r--r--bitbake/lib/bb/cache.py847
-rw-r--r--bitbake/lib/bb/cache_extra.py75
-rw-r--r--bitbake/lib/bb/checksum.py90
-rw-r--r--bitbake/lib/bb/codeparser.py328
-rw-r--r--bitbake/lib/bb/command.py444
-rw-r--r--bitbake/lib/bb/compat.py6
-rw-r--r--bitbake/lib/bb/cooker.py1874
-rw-r--r--bitbake/lib/bb/cookerdata.py305
-rw-r--r--bitbake/lib/bb/daemonize.py190
-rw-r--r--bitbake/lib/bb/data.py403
-rw-r--r--bitbake/lib/bb/data_smart.py804
-rw-r--r--bitbake/lib/bb/event.py641
-rw-r--r--bitbake/lib/bb/exceptions.py91
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py1575
-rw-r--r--bitbake/lib/bb/fetch2/bzr.py143
-rw-r--r--bitbake/lib/bb/fetch2/cvs.py171
-rw-r--r--bitbake/lib/bb/fetch2/git.py355
-rw-r--r--bitbake/lib/bb/fetch2/gitannex.py76
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py126
-rw-r--r--bitbake/lib/bb/fetch2/hg.py187
-rw-r--r--bitbake/lib/bb/fetch2/local.py116
-rw-r--r--bitbake/lib/bb/fetch2/osc.py135
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py194
-rw-r--r--bitbake/lib/bb/fetch2/repo.py98
-rw-r--r--bitbake/lib/bb/fetch2/sftp.py129
-rw-r--r--bitbake/lib/bb/fetch2/ssh.py127
-rw-r--r--bitbake/lib/bb/fetch2/svn.py191
-rw-r--r--bitbake/lib/bb/fetch2/wget.py106
-rw-r--r--bitbake/lib/bb/methodpool.py29
-rw-r--r--bitbake/lib/bb/monitordisk.py265
-rw-r--r--bitbake/lib/bb/msg.py196
-rw-r--r--bitbake/lib/bb/namedtuple_with_abc.py255
-rw-r--r--bitbake/lib/bb/parse/__init__.py157
-rw-r--r--bitbake/lib/bb/parse/ast.py478
-rw-r--r--bitbake/lib/bb/parse/parse_py/BBHandler.py267
-rw-r--r--bitbake/lib/bb/parse/parse_py/ConfHandler.py189
-rw-r--r--bitbake/lib/bb/parse/parse_py/__init__.py33
-rw-r--r--bitbake/lib/bb/persist_data.py215
-rw-r--r--bitbake/lib/bb/process.py133
-rw-r--r--bitbake/lib/bb/providers.py381
-rw-r--r--bitbake/lib/bb/pysh/__init__.py0
-rw-r--r--bitbake/lib/bb/pysh/builtin.py710
-rw-r--r--bitbake/lib/bb/pysh/interp.py1367
-rw-r--r--bitbake/lib/bb/pysh/lsprof.py116
-rw-r--r--bitbake/lib/bb/pysh/pysh.py167
-rw-r--r--bitbake/lib/bb/pysh/pyshlex.py888
-rw-r--r--bitbake/lib/bb/pysh/pyshyacc.py779
-rw-r--r--bitbake/lib/bb/pysh/sherrors.py41
-rw-r--r--bitbake/lib/bb/pysh/subprocess_fix.py77
-rw-r--r--bitbake/lib/bb/runqueue.py2154
-rw-r--r--bitbake/lib/bb/server/__init__.py96
-rw-r--r--bitbake/lib/bb/server/process.py236
-rw-r--r--bitbake/lib/bb/server/xmlrpc.py392
-rw-r--r--bitbake/lib/bb/shell.py820
-rw-r--r--bitbake/lib/bb/siggen.py483
-rw-r--r--bitbake/lib/bb/taskdata.py651
-rw-r--r--bitbake/lib/bb/tests/__init__.py0
-rw-r--r--bitbake/lib/bb/tests/codeparser.py375
-rw-r--r--bitbake/lib/bb/tests/cow.py136
-rw-r--r--bitbake/lib/bb/tests/data.py296
-rw-r--r--bitbake/lib/bb/tests/fetch.py562
-rw-r--r--bitbake/lib/bb/tests/utils.py53
-rw-r--r--bitbake/lib/bb/tinfoil.py96
-rw-r--r--bitbake/lib/bb/ui/__init__.py17
-rw-r--r--bitbake/lib/bb/ui/buildinfohelper.py964
-rw-r--r--bitbake/lib/bb/ui/crumbs/__init__.py17
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/builddetailspage.py437
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/builder.py1475
-rw-r--r--bitbake/lib/bb/ui/crumbs/buildmanager.py455
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/__init__.py0
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py341
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py44
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py70
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py219
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py172
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py298
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py163
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/propertydialog.py437
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py90
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py51
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py159
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py122
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py894
-rw-r--r--bitbake/lib/bb/ui/crumbs/hobcolor.py38
-rw-r--r--bitbake/lib/bb/ui/crumbs/hobeventhandler.py639
-rw-r--r--bitbake/lib/bb/ui/crumbs/hoblistmodel.py903
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/hobpages.py128
-rw-r--r--bitbake/lib/bb/ui/crumbs/hobwidget.py904
-rw-r--r--bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py561
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/imagedetailspage.py669
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/packageselectionpage.py355
-rw-r--r--bitbake/lib/bb/ui/crumbs/persistenttooltip.py186
-rw-r--r--bitbake/lib/bb/ui/crumbs/progress.py23
-rw-r--r--bitbake/lib/bb/ui/crumbs/progressbar.py59
-rw-r--r--bitbake/lib/bb/ui/crumbs/puccho.glade606
-rwxr-xr-xbitbake/lib/bb/ui/crumbs/recipeselectionpage.py335
-rw-r--r--bitbake/lib/bb/ui/crumbs/runningbuild.py551
-rw-r--r--bitbake/lib/bb/ui/crumbs/sanitycheckpage.py85
-rw-r--r--bitbake/lib/bb/ui/crumbs/utils.py34
-rw-r--r--bitbake/lib/bb/ui/depexp.py326
-rw-r--r--bitbake/lib/bb/ui/goggle.py121
-rwxr-xr-xbitbake/lib/bb/ui/hob.py109
-rw-r--r--bitbake/lib/bb/ui/icons/images/images_display.pngbin0 -> 6898 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/images/images_hover.pngbin0 -> 7051 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/add-hover.pngbin0 -> 1212 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/add.pngbin0 -> 1176 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/alert.pngbin0 -> 3954 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/confirmation.pngbin0 -> 5789 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/denied.pngbin0 -> 3955 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/error.pngbin0 -> 6482 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/info.pngbin0 -> 3311 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/issues.pngbin0 -> 4549 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/refresh.pngbin0 -> 5250 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/remove-hover.pngbin0 -> 2809 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/remove.pngbin0 -> 1971 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/indicators/tick.pngbin0 -> 4563 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/info/info_display.pngbin0 -> 4117 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/info/info_hover.pngbin0 -> 4167 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/layers/layers_display.pngbin0 -> 4840 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/layers/layers_hover.pngbin0 -> 5257 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/packages/packages_display.pngbin0 -> 7011 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/packages/packages_hover.pngbin0 -> 7121 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/recipe/recipe_display.pngbin0 -> 4723 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/recipe/recipe_hover.pngbin0 -> 4866 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/settings/settings_display.pngbin0 -> 6076 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/settings/settings_hover.pngbin0 -> 6269 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/templates/templates_display.pngbin0 -> 5651 bytes
-rw-r--r--bitbake/lib/bb/ui/icons/templates/templates_hover.pngbin0 -> 5791 bytes
-rw-r--r--bitbake/lib/bb/ui/knotty.py550
-rw-r--r--bitbake/lib/bb/ui/ncurses.py373
-rw-r--r--bitbake/lib/bb/ui/puccho.py425
-rw-r--r--bitbake/lib/bb/ui/toasterui.py292
-rw-r--r--bitbake/lib/bb/ui/uievent.py133
-rw-r--r--bitbake/lib/bb/ui/uihelper.py100
-rw-r--r--bitbake/lib/bb/utils.py878
138 files changed, 39903 insertions, 0 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000..6917ec3
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,323 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
5#
6# Copyright (C) 2006 Tim Amsell
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21#Please Note:
22# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
23# Assign a file to __warn__ to get warnings about slow operations.
24#
25
from __future__ import print_function
import copy
import types
# Types whose values are safe to share between COW levels without copying.
# COWDictMeta.__setitem__ stores these directly on the class; anything else
# goes through the MUTABLE-key indirection and is copied on first write.
# NOTE: this module is Python 2 only — types.NoneType, long and basestring
# do not exist on Python 3.
ImmutableTypes = (
    types.NoneType,
    bool,
    complex,
    float,
    int,
    long,
    tuple,
    frozenset,
    basestring
)

# Suffix appended to attribute names holding mutable values; it marks keys
# that need copy-on-read semantics (see COWDictMeta.__getmutable__).
MUTABLE = "__mutable__"
class COWMeta(type):
    """Common base metaclass for COWDictMeta and COWSetMeta.

    Exists mainly as an isinstance() marker so that COW objects stored
    inside another COW dict are not copied again on access.
    """
    pass
45
class COWDictMeta(COWMeta):
    """Metaclass implementing a copy-on-write dictionary.

    Each copy() creates a subclass of the current class; attribute lookup
    falls through to the parent level, so unchanged keys are shared and
    writes only touch the newest level.  Mutable values are stored under
    key + MUTABLE and copied into the current level on first non-readonly
    access.
    """
    __warn__ = False          # assign a file object to log slow-path warnings
    __hasmutable__ = False    # True once any mutable value has been stored
    __marker__ = tuple()      # sentinel value marking deleted keys

    def __str__(cls):
        # FIXME: I have magic numbers! (3 accounts for bookkeeping attrs)
        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
    __repr__ = __str__

    def cow(cls):
        """Return a new copy-on-write level stacked on top of this one."""
        class C(cls):
            __count__ = cls.__count__ + 1
        return C
    copy = cow
    __call__ = cow

    def __setitem__(cls, key, value):
        if not isinstance(value, ImmutableTypes):
            if not isinstance(value, COWMeta):
                cls.__hasmutable__ = True
            key += MUTABLE
        setattr(cls, key, value)

    def __getmutable__(cls, key, readonly=False):
        """Fetch a mutable value, copying it into this level unless readonly."""
        nkey = key + MUTABLE
        try:
            # Already present at (copied into) this level.
            return cls.__dict__[nkey]
        except KeyError:
            pass

        value = getattr(cls, nkey)
        if readonly:
            return value

        if not cls.__warn__ is False and not isinstance(value, COWMeta):
            print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
        try:
            value = value.copy()
        except AttributeError as e:
            value = copy.copy(value)
        setattr(cls, nkey, value)
        return value

    __getmarker__ = []
    def __getreadonly__(cls, key, default=__getmarker__):
        """\
        Get a value (even if mutable) which you promise not to change.
        """
        return cls.__getitem__(key, default, True)

    def __getitem__(cls, key, default=__getmarker__, readonly=False):
        try:
            try:
                value = getattr(cls, key)
            except AttributeError:
                value = cls.__getmutable__(key, readonly)

            # This is for values which have been deleted
            if value is cls.__marker__:
                raise AttributeError("key %s does not exist." % key)

            return value
        except AttributeError as e:
            if not default is cls.__getmarker__:
                return default

            raise KeyError(str(e))

    def __delitem__(cls, key):
        # Deletion only masks the key at this level; parent levels keep it
        # (see __revertitem__ to unmask).
        cls.__setitem__(key, cls.__marker__)

    def __revertitem__(cls, key):
        """Remove this level's override so the parent level's value shows again."""
        # 'key not in' replaces the removed dict.has_key() method; identical
        # semantics on Python 2, and forward compatible.
        if key not in cls.__dict__:
            key += MUTABLE
        delattr(cls, key)

    def __contains__(cls, key):
        return cls.has_key(key)

    def has_key(cls, key):
        value = cls.__getreadonly__(key, cls.__marker__)
        if value is cls.__marker__:
            return False
        return True

    def iter(cls, type, readonly=False):
        """Generator over keys/values/items, skipping dunder attributes."""
        for key in dir(cls):
            if key.startswith("__"):
                continue

            if key.endswith(MUTABLE):
                key = key[:-len(MUTABLE)]

            if type == "keys":
                yield key

            try:
                if readonly:
                    value = cls.__getreadonly__(key)
                else:
                    value = cls[key]
            except KeyError:
                continue

            if type == "values":
                yield value
            if type == "items":
                yield (key, value)
        # Bug fix: the explicit 'raise StopIteration()' that used to sit here
        # was redundant (falling off the end of a generator already stops it)
        # and becomes a RuntimeError under PEP 479 (Python 3.7+).

    def iterkeys(cls):
        return cls.iter("keys")
    def itervalues(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
        return cls.iter("values", readonly)
    def iteritems(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
        return cls.iter("items", readonly)
167
class COWSetMeta(COWDictMeta):
    """Metaclass implementing a copy-on-write set on top of COWDictMeta.

    Elements are stored as dict entries keyed on repr(hash(value)).
    """
    def __str__(cls):
        # FIXME: I have magic numbers!
        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
    __repr__ = __str__

    def cow(cls):
        """Return a new copy-on-write level stacked on top of this one."""
        class C(cls):
            __count__ = cls.__count__ + 1
        return C

    def add(cls, value):
        COWDictMeta.__setitem__(cls, repr(hash(value)), value)

    def remove(cls, value):
        COWDictMeta.__delitem__(cls, repr(hash(value)))

    def __in__(cls, value):
        # Bug fix: the unbound has_key() call was missing the 'cls' argument,
        # so membership tests raised TypeError instead of returning a bool.
        return COWDictMeta.has_key(cls, repr(hash(value)))

    def iterkeys(cls):
        raise TypeError("sets don't have keys")

    def iteritems(cls):
        raise TypeError("sets don't have 'items'")
193
# These are the actual classes you use!
class COWDictBase(object):
    # Python 2 metaclass hook: makes this class behave as a COW dict.
    __metaclass__ = COWDictMeta
    __count__ = 0  # copy-on-write nesting depth of this level

class COWSetBase(object):
    # Python 2 metaclass hook: makes this class behave as a COW set.
    __metaclass__ = COWSetMeta
    __count__ = 0  # copy-on-write nesting depth of this level
202
if __name__ == "__main__":
    # Self-test / demonstration of COW dict and COW set behaviour.
    import sys
    COWDictBase.__warn__ = sys.stderr
    a = COWDictBase()
    print("a", a)

    a['a'] = 'a'
    a['b'] = 'b'
    a['dict'] = {}

    b = a.copy()
    print("b", b)
    b['c'] = 'b'

    print()

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems():
        print(x)
    print()

    # Writes through 'b' must not leak into 'a': the immutable value is
    # shadowed at b's level, and the mutable dict is copied on first write.
    b['dict']['a'] = 'b'
    b['a'] = 'c'

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems():
        print(x)
    print()

    # Missing keys raise KeyError (translated from AttributeError).
    try:
        b['dict2']
    except KeyError as e:
        print("Okay!")

    a['set'] = COWSetBase()
    a['set'].add("o1")
    a['set'].add("o1")
    a['set'].add("o2")

    print("a", a)
    for x in a['set'].itervalues():
        print(x)
    print("--")
    print("b", b)
    for x in b['set'].itervalues():
        print(x)
    print()

    b['set'].add('o3')

    print("a", a)
    for x in a['set'].itervalues():
        print(x)
    print("--")
    print("b", b)
    for x in b['set'].itervalues():
        print(x)
    print()

    a['set2'] = set()
    a['set2'].add("o1")
    a['set2'].add("o1")
    a['set2'].add("o2")

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    # Deleted keys must raise KeyError and report absent via has_key().
    del b['b']
    try:
        print(b['b'])
    except KeyError:
        print("Yay! deleted key raises error")

    if b.has_key('b'):
        print("Boo!")
    else:
        print("Yay - has_key with delete works!")

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    # __revertitem__ removes this level's deletion marker, unmasking the
    # parent level's value again.
    b.__revertitem__('b')

    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()

    b.__revertitem__('dict')
    print("a", a)
    for x in a.iteritems():
        print(x)
    print("--")
    print("b", b)
    for x in b.iteritems(readonly=True):
        print(x)
    print()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
new file mode 100644
index 0000000..30a9745
--- /dev/null
+++ b/bitbake/lib/bb/__init__.py
@@ -0,0 +1,143 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Build System Python Library
5#
6# Copyright (C) 2003 Holger Schurig
7# Copyright (C) 2003, 2004 Chris Larson
8#
9# Based on Gentoo's portage.py.
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
__version__ = "1.23.0"

import sys
# BitBake depends on fixes present in Python 2.7.3; refuse older interpreters.
if sys.version_info < (2, 7, 3):
    raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
29
30
class BBHandledException(Exception):
    """
    The big dilemma for generic bitbake code is what information to give the user
    when an exception occurs. Any exception inheriting this base exception class
    has already provided information to the user via some 'fired' message type such as
    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
    encounters an exception derived from this class, no backtrace or other information
    will be given to the user, it's assumed the earlier event provided the relevant information.
    """
    pass
41
import os
import logging


class NullHandler(logging.Handler):
    # Discards every record; attached to the BitBake root logger so the
    # "no handlers could be found" warning is never emitted.
    def emit(self, record):
        pass
49
Logger = logging.getLoggerClass()
class BBLogger(Logger):
    """Logger subclass adding bitbake's extra levels: numbered debug
    levels, 'plain' (just above INFO) and 'verbose' (just below INFO)."""
    def __init__(self, name):
        if name.split(".")[0] == "BitBake":
            # BitBake loggers take .debug(level, msg); route through bbdebug.
            self.debug = self.bbdebug
        Logger.__init__(self, name)

    def bbdebug(self, level, msg, *args, **kwargs):
        # Debug level 1 maps to logging.DEBUG; higher levels map lower still.
        return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)

    def plain(self, msg, *args, **kwargs):
        return self.log(logging.INFO + 1, msg, *args, **kwargs)

    def verbose(self, msg, *args, **kwargs):
        return self.log(logging.INFO - 1, msg, *args, **kwargs)

logging.raiseExceptions = False
logging.setLoggerClass(BBLogger)

logger = logging.getLogger("BitBake")
logger.addHandler(NullHandler())
logger.setLevel(logging.DEBUG - 2)

# This has to be imported after the setLoggerClass, as the import of bb.msg
# can result in construction of the various loggers.
import bb.msg

# Keep the legacy bb.fetch name pointing at the fetch2 implementation.
from bb import fetch2 as fetch
sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
79
80# Messaging convenience functions
# Messaging convenience functions
def plain(*args):
    """Log at bitbake's 'plain' level (verbatim console output)."""
    logger.plain(''.join(args))

def debug(lvl, *args):
    """Log a debug message; lvl selects the numeric debug verbosity."""
    if isinstance(lvl, basestring):
        # Legacy callers sometimes omit the level entirely; fold the first
        # argument back into the message and fall back to level 1.
        logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
        args = (lvl,) + args
        lvl = 1
    message = ''.join(args)
    logger.debug(lvl, message)

def note(*args):
    """Log an informational message."""
    logger.info(''.join(args))

def warn(*args):
    """Log a warning message."""
    logger.warn(''.join(args))

def error(*args):
    """Log an error message."""
    logger.error(''.join(args))

def fatal(*args):
    """Log a critical message, then terminate bitbake."""
    message = ''.join(args)
    logger.critical(message)
    sys.exit(1)
103
104
def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.

    Arguments:
        func: the callable to wrap.
        name: name reported in the warning (defaults to func.__name__).
        advice: optional extra guidance appended to the warning text.
    """
    import warnings
    import functools

    if advice:
        advice = ": %s" % advice
    if name is None:
        name = func.__name__

    # functools.wraps copies __name__, __doc__ and __dict__ like the old
    # manual code did, and additionally preserves __module__.
    @functools.wraps(func)
    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function %s%s." % (name,
                                                             advice),
                      category=DeprecationWarning,
                      stacklevel=2)
        return func(*args, **kwargs)
    return newFunc
126
# For compatibility
def deprecate_import(current, modulename, fromlist, renames = None):
    """Import objects from one module into another, wrapping them with a DeprecationWarning"""
    import sys

    module = __import__(modulename, fromlist = fromlist)
    for position, objname in enumerate(fromlist):
        # Wrap each imported object so any use emits a DeprecationWarning
        # pointing the caller at the object's new home.
        obj = getattr(module, objname)
        wrapped = deprecated(obj, "{0}.{1}".format(current, objname),
                             "Please use {0}.{1} instead".format(modulename, objname))
        newname = renames[position] if renames else objname
        setattr(sys.modules[current], newname, wrapped)
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
new file mode 100644
index 0000000..5cb4c06
--- /dev/null
+++ b/bitbake/lib/bb/build.py
@@ -0,0 +1,709 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake 'Build' implementation
5#
6# Core code for function execution and task handling in the
7# BitBake build tools.
8#
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# Based on Gentoo's portage.py.
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
import os
import sys
import logging
import shlex
import glob
import time
import bb
import bb.msg
import bb.process
# NOTE: contextlib.nested is Python 2 only (removed in Python 3).
from contextlib import nested
from bb import event, utils

bblogger = logging.getLogger('BitBake')
logger = logging.getLogger('BitBake.Build')

# Shared always-open handle on /dev/null, used to detach child stdin.
NULL = open(os.devnull, 'r+')


# When we execute a python function we'd like certain things
# in all namespaces, hence we add them to __builtins__
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
__builtins__['os'] = os
class FuncFailed(Exception):
    """Raised when a BitBake function (task body) fails to execute."""

    def __init__(self, name = None, logfile = None):
        self.logfile = logfile
        self.name = name
        self.msg = 'Function failed: %s' % name if name else "Function failed"

    def __str__(self):
        # Point the user at the log file, but only when it actually exists.
        if self.logfile and os.path.exists(self.logfile):
            return "%s (log file is located at %s)" % (self.msg, self.logfile)
        return self.msg
69
class TaskBase(event.Event):
    """Base class for task events"""

    def __init__(self, t, logfile, d):
        # t: task name (e.g. "do_compile"); d: the task's datastore.
        self._task = t
        self._package = d.getVar("PF", True)
        self.taskfile = d.getVar("FILE", True)
        self.taskname = self._task
        self.logfile = logfile
        self.time = time.time()
        event.Event.__init__(self)
        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())

    def getTask(self):
        return self._task

    def setTask(self, task):
        self._task = task

    def getDisplayName(self):
        # Strip the "Task" prefix from the event class name,
        # e.g. "TaskStarted" -> "Started".
        return bb.event.getName(self)[4:]

    task = property(getTask, setTask, None, "task property")
93
class TaskStarted(TaskBase):
    """Task execution started"""
    def __init__(self, t, logfile, taskflags, d):
        super(TaskStarted, self).__init__(t, logfile, d)
        # Variable flags of the task, so UIs can inspect e.g. 'progress'.
        self.taskflags = taskflags

class TaskSucceeded(TaskBase):
    """Task execution completed"""

class TaskFailed(TaskBase):
    """Task execution failed"""

    def __init__(self, task, logfile, metadata, errprinted = False):
        # errprinted: True when an error message was already shown to the
        # user, so UIs need not dump the log file again.
        self.errprinted = errprinted
        super(TaskFailed, self).__init__(task, logfile, metadata)

class TaskFailedSilent(TaskBase):
    """Task execution failed (silently)"""
    def getDisplayName(self):
        # Don't need to tell the user it was silent
        return "Failed"

class TaskInvalid(TaskBase):
    """Event fired when a requested task does not exist."""

    def __init__(self, task, metadata):
        super(TaskInvalid, self).__init__(task, None, metadata)
        self._message = "No such task '%s'" % task
122
class LogTee(object):
    """File-like object duplicating every write to a logger and a file."""

    def __init__(self, logger, outfile):
        self.outfile = outfile
        self.logger = logger
        self.name = self.outfile.name

    def write(self, string):
        # Mirror the data to both destinations.
        self.logger.plain(string)
        self.outfile.write(string)

    def flush(self):
        self.outfile.flush()

    # Delegate context-manager protocol to the underlying file so a LogTee
    # can be used wherever the file itself would be.
    def __enter__(self):
        self.outfile.__enter__()
        return self

    def __exit__(self, *excinfo):
        self.outfile.__exit__(*excinfo)

    def __repr__(self):
        return '<LogTee {0}>'.format(self.name)
144
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'

    Arguments:
        func: name of the function (datastore variable) to run.
        d: the datastore providing the body and its variable flags.
        dirs: optional override of the 'dirs' flag (last entry is the cwd).
    """

    body = d.getVar(func)
    if not body:
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = d.getVarFlags(func)
    # 'cleandirs' are wiped and recreated before the function runs.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in d.expand(cleandirs).split():
            bb.utils.remove(cdir, True)
            bb.utils.mkdirhier(cdir)

    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = d.expand(dirs).split()

    # The last entry in 'dirs' becomes the working directory; default to ${B}.
    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = d.getVar('B', True)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

    lockflag = flags.get('lockfiles')
    if lockflag:
        lockfiles = [f for f in d.expand(lockflag).split()]
    else:
        lockfiles = None

    tempdir = d.getVar('T', True)

    # or func allows items to be executed outside of the normal
    # task set, such as buildhistory
    task = d.getVar('BB_RUNTASK', True) or func
    if task == func:
        taskfunc = task
    else:
        taskfunc = "%s.%s" % (task, func)

    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))

    # Setup the courtesy link to the runfn, only for tasks
    # we create the link 'just' before the run script is created
    # if we create it after, and if the run script fails, then the
    # link won't be created as an exception would be fired.
    if task == func:
        runlink = os.path.join(tempdir, 'run.{0}'.format(task))
        if runlink:
            bb.utils.remove(runlink)

            try:
                os.symlink(runfn, runlink)
            except OSError:
                pass

    # Hold all requested lockfiles for the duration of the function.
    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)
216
# Template used to wrap a python task body in a function taking the
# datastore 'd'; the generated function is invoked immediately.
_functionfmt = """
def {function}(d):
{body}

{function}(d)
"""
# Formatter applied to python-logging output captured into task logfiles.
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'"""

    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    # Persist the generated code to T/run.* for debugging.
    with open(runfile, 'w') as script:
        script.write(code)

    if cwd:
        try:
            olddir = os.getcwd()
        except OSError:
            # The current directory may have been removed under us; in that
            # case there is nothing to restore afterwards.
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except:
        # SkipPackage/FuncFailed propagate unchanged; any other exception is
        # wrapped so callers see the uniform FuncFailed failure type.
        if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
            raise

        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass
258
def shell_trap_code():
    """Return the common shell-script prologue: an EXIT trap that prints a
    diagnostic when a command fails (including file/line under bash), plus
    'set -e' so the script aborts on the first error."""
    return '''#!/bin/sh\n
# Emit a useful diagnostic if something fails:
bb_exit_handler() {
    ret=$?
    case $ret in
    0)  ;;
    *)  case $BASH_VERSION in
        "") echo "WARNING: exit code $ret from a shell command.";;
        *)  echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
  \"$BASH_COMMAND\"";;
        esac
        exit $ret
    esac
}
trap 'bb_exit_handler' 0
set -e
'''
277
def exec_func_shell(func, d, runfile, cwd=None):
    """Execute a shell function from the metadata

    Note on directory behavior. The 'dirs' varflag should contain a list
    of the directories you need created prior to execution. The last
    item in the list is where we will chdir/cd to.
    """

    # Don't let the emitted shell script override PWD
    d.delVarFlag('PWD', 'export')

    # Generate the run script: trap prologue, the function body (and its
    # dependencies), then a call to the function itself.
    with open(runfile, 'w') as script:
        script.write(shell_trap_code())

        bb.data.emit_func(func, script, d)

        if bb.msg.loggerVerboseLogs:
            script.write("set -x\n")
        if cwd:
            script.write("cd '%s'\n" % cwd)
        script.write("%s\n" % func)
        script.write('''
# cleanup
ret=$?
trap '' 0
exit $?
''')

    os.chmod(runfile, 0775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot'):
        # Run under the fakeroot wrapper (e.g. pseudo) when requested.
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]

    if bb.msg.loggerDefaultVerbose:
        # Echo output to the console in addition to the log file.
        logfile = LogTee(logger, sys.stdout)
    else:
        logfile = sys.stdout

    bb.debug(2, "Executing shell function %s" % func)

    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)
329
def _task_data(fn, task, d):
    """Build the task-local datastore: a copy of 'd' with the current
    file/task recorded and a task-specific OVERRIDES entry prepended."""
    localdata = bb.data.createCopy(d)
    taskname = task[3:]  # strip the "do_" prefix
    localdata.setVar('BB_FILENAME', fn)
    localdata.setVar('BB_CURRENTTASK', taskname)
    overrides = 'task-%s:%s' % (taskname.replace('_', '-'),
                                d.getVar('OVERRIDES', False))
    localdata.setVar('OVERRIDES', overrides)
    localdata.finalize()
    bb.data.expandKeys(localdata)
    return localdata
339
def _exec_task(fn, task, d, quieterr):
    """Execute a BB 'task'

    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.

    Returns 0 on success, 1 on failure (with the appropriate Task* event
    fired in either case).
    """
    if not d.getVarFlag(task, 'task'):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1

    logger.debug(1, "Executing task %s", task)

    localdata = _task_data(fn, task, d)
    tempdir = localdata.getVar('T', True)
    if not tempdir:
        bb.fatal("T variable not set, unable to build")

    # Change nice level if we're asked to
    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
    if nice:
        curnice = os.nice(0)
        nice = int(nice) - curnice
        newnice = os.nice(nice)
        logger.debug(1, "Renice to %s " % newnice)

    bb.utils.mkdirhier(tempdir)

    # Determine the logfile to generate
    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
    logbase = logfmt.format(task=task, pid=os.getpid())

    # Document the order of the tasks...
    logorder = os.path.join(tempdir, 'log.task_order')
    try:
        with open(logorder, 'a') as logorderfile:
            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
    except OSError:
        # Best effort only; a missing task_order file is not fatal.
        logger.exception("Opening log file '%s'", logorder)
        pass

    # Setup the courtesy link to the logfn
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logfn = os.path.join(tempdir, logbase)
    if loglink:
        bb.utils.remove(loglink)

        try:
            os.symlink(logbase, loglink)
        except OSError:
            pass

    prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
    postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)

    class ErrorCheckHandler(logging.Handler):
        # Records whether any ERROR-level message was emitted while the task
        # ran, so TaskFailed can tell UIs an error was already printed.
        def __init__(self):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            self.triggered = True

    # Handle logfiles
    si = open('/dev/null', 'r')
    try:
        bb.utils.mkdirhier(os.path.dirname(logfn))
        logfile = open(logfn, 'w')
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
        pass

    # Dup the existing fds so we dont lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own: stdin from /dev/null, stdout/stderr
    # into the task logfile.
    os.dup2(si.fileno(), osi[1])
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
    handler.setLevel(logging.DEBUG - 2)
    bblogger.addHandler(handler)

    errchk = ErrorCheckHandler()
    bblogger.addHandler(errchk)

    localdata.setVar('BB_LOGFILE', logfn)
    localdata.setVar('BB_RUNTASK', task)

    flags = localdata.getVarFlags(task)

    event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
    try:
        # Run prefuncs, the task itself, then postfuncs, in order.
        for func in (prefuncs or '').split():
            exec_func(func, localdata)
        exec_func(task, localdata)
        for func in (postfuncs or '').split():
            exec_func(func, localdata)
    except FuncFailed as exc:
        if quieterr:
            event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
        else:
            errprinted = errchk.triggered
            logger.error(str(exc))
            event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()

        bblogger.removeHandler(handler)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
        si.close()

        logfile.close()
        # Drop empty logfiles (and their courtesy links) to reduce clutter.
        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
            logger.debug(2, "Zero size logfn %s, removing", logfn)
            bb.utils.remove(logfn)
            bb.utils.remove(loglink)
    event.fire(TaskSucceeded(task, logfn, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0
479
def exec_task(fn, task, d, profile = False):
    """Execute a task, optionally under the profiler.

    fn: recipe filename, passed through to _exec_task.
    task: name of the task to execute.
    d: the datastore the task runs against.
    profile: when True, run the task under cProfile and dump/process
        the profile statistics.

    Returns 0 on success, 1 on failure (mirroring _exec_task).
    """
    try:
        quieterr = False
        if d.getVarFlag(task, "quieterrors") is not None:
            quieterr = True

        if profile:
            profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
            try:
                import cProfile as profile
            except ImportError:
                # cProfile is not available everywhere; fall back to the
                # pure-python profiler with the same interface.
                import profile
            prof = profile.Profile()
            ret = prof.runcall(_exec_task, fn, task, d, quieterr)
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)

            return ret
        else:
            return _exec_task(fn, task, d, quieterr)

    except Exception:
        from traceback import format_exc
        if not quieterr:
            logger.error("Build of %s failed" % (task))
            logger.error(format_exc())
            failedevent = TaskFailed(task, None, d, True)
            event.fire(failedevent, d)
        return 1
509
def stamp_internal(taskname, d, file_name):
    """Return the stamp path+filename for a task, ensuring the stamp
    directory exists.

    In the bitbake core, d can be a CacheData and file_name will be set.
    When called in task context, d will be a data store and file_name
    will not be set.
    """
    # Stamp flags are stored against the base task name, without any
    # _setscene suffix.
    if taskname.endswith("_setscene") and taskname != "do_setscene":
        flagname = taskname.replace("_setscene", "")
    else:
        flagname = taskname

    if file_name:
        stamp = d.stamp_base[file_name].get(flagname) or d.stamp[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(flagname) or ""
    else:
        stamp = d.getVarFlag(flagname, 'stamp-base', True) or d.getVar('STAMP', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(flagname, 'stamp-extra-info', True) or ""

    if not stamp:
        return

    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    # Create the stamp directory if it does not already exist.
    stampdir = os.path.dirname(stamp)
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)

    return stamp
541
def stamp_cleanmask_internal(taskname, d, file_name):
    """Return the list of glob masks used to clean old stamps for a task.

    In the bitbake core, d can be a CacheData and file_name will be set.
    When called in task context, d will be a data store and file_name
    will not be set.
    """
    # Stamp flags are stored against the base task name, without any
    # _setscene suffix.
    if taskname.endswith("_setscene") and taskname != "do_setscene":
        flagname = taskname.replace("_setscene", "")
    else:
        flagname = taskname

    if file_name:
        stamp = d.stamp_base_clean[file_name].get(flagname) or d.stampclean[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(flagname) or ""
    else:
        stamp = d.getVarFlag(flagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(flagname, 'stamp-extra-info', True) or ""

    if not stamp:
        return []

    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)

    # Clean both the plain and the _setscene variants of the stamp.
    return [cleanmask, cleanmask.replace(flagname, flagname + "_setscene")]
568
def make_stamp(task, d, file_name = None):
    """
    Creates/updates a stamp for a given task
    (d can be a data dict or dataCache)
    """
    # Clear out any stale stamps first, but keep signature data and
    # taint markers that live alongside the stamps.
    for mask in stamp_cleanmask_internal(task, d, file_name):
        for name in glob.glob(mask):
            if "sigdata" in name or name.endswith('.taint'):
                continue
            os.unlink(name)

    stamp = stamp_internal(task, d, file_name)
    # Remove and recreate the stamp so the timestamp changes even on
    # broken NFS filesystems.
    if stamp:
        bb.utils.remove(stamp)
        open(stamp, "w").close()

    # In task context (no file_name), write out a signature file for each
    # real (non-setscene) task as it completes.
    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
        file_name = d.getVar('BB_FILENAME', True)
        bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)
597
def del_stamp(task, d, file_name = None):
    """
    Removes a stamp for a given task
    (d can be a data dict or dataCache)
    """
    bb.utils.remove(stamp_internal(task, d, file_name))
605
def write_taint(task, d, file_name = None):
    """Force the given task (and its dependents) to re-run on the next
    build by writing a "taint" file that feeds into its taskhash.
    (d can be a data dict or dataCache)
    """
    import uuid
    if file_name:
        stampbase = d.stamp[file_name]
    else:
        stampbase = d.getVar('STAMP', True)
    taintfn = stampbase + '.' + task + '.taint'

    bb.utils.mkdirhier(os.path.dirname(taintfn))
    # The specific content of the taint file is not really important,
    # we just need it to be random, so a random UUID is used
    with open(taintfn, 'w') as taintf:
        taintf.write(str(uuid.uuid4()))
623
def stampfile(taskname, d, file_name = None):
    """Public wrapper returning the stamp path+filename for taskname.
    (d can be a data dict or dataCache)
    """
    return stamp_internal(taskname, d, file_name)
630
def add_tasks(tasklist, deltasklist, d):
    """Register every task in tasklist (except those in deltasklist) in
    the datastore's _task_deps structure."""
    task_deps = d.getVar('_task_deps')
    if not task_deps:
        task_deps = {}
    if 'tasks' not in task_deps:
        task_deps['tasks'] = []
    if 'parents' not in task_deps:
        task_deps['parents'] = {}

    for task in tasklist:
        task = d.expand(task)

        if task in deltasklist:
            continue

        d.setVarFlag(task, 'task', 1)

        if task not in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = d.getVarFlags(task)

        # Record each dependency-related flag under its own key,
        # indexed by task name.
        for flagname in ('depends', 'rdepends', 'deptask', 'rdeptask',
                         'recrdeptask', 'recideptask', 'nostamp',
                         'fakeroot', 'noexec', 'umask'):
            if flagname not in task_deps:
                task_deps[flagname] = {}
            if flagname in flags:
                task_deps[flagname][task] = d.expand(flags[flagname])

        # Direct parents come from the task's 'deps' flag.
        task_deps['parents'][task] = []
        if 'deps' in flags:
            for dep in flags['deps']:
                task_deps['parents'][task].append(d.expand(dep))

    # don't assume holding a reference
    d.setVar('_task_deps', task_deps)
676
def addtask(task, before, after, d):
    """Declare a task and record its ordering constraints.

    after: whitespace-separated tasks this task must run after (they
        become its deps).
    before: whitespace-separated tasks that must run after this one.
    """
    if not task.startswith("do_"):
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS') or []
    if task not in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBTASKS', bbtasks)

    existing = d.getVarFlag(task, "deps") or []
    if after is not None:
        # This task depends on every task listed in 'after'.
        for entry in after.split():
            if entry not in existing:
                existing.append(entry)
    d.setVarFlag(task, "deps", existing)
    if before is not None:
        # Every task listed in 'before' gains a dependency on this task.
        for entry in before.split():
            deps = d.getVarFlag(entry, "deps") or []
            if task not in deps:
                d.setVarFlag(entry, "deps", [task] + deps)
700
def deltask(task, d):
    """Record a task for removal in __BBDELTASKS (normalising the
    do_ prefix)."""
    if not task.startswith("do_"):
        task = "do_" + task

    deltasks = d.getVar('__BBDELTASKS') or []
    if task not in deltasks:
        deltasks.append(task)
    d.setVar('__BBDELTASKS', deltasks)
709
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000..431fc07
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,847 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Cache implementation
5#
6# Caching of bitbake variables before task execution
7
8# Copyright (C) 2006 Richard Purdie
9# Copyright (C) 2012 Intel Corporation
10
11# but small sections based on code from bin/bitbake:
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
15# Copyright (C) 2005 Holger Hans Peter Freyther
16# Copyright (C) 2005 ROAD GmbH
17#
18# This program is free software; you can redistribute it and/or modify
19# it under the terms of the GNU General Public License version 2 as
20# published by the Free Software Foundation.
21#
22# This program is distributed in the hope that it will be useful,
23# but WITHOUT ANY WARRANTY; without even the implied warranty of
24# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25# GNU General Public License for more details.
26#
27# You should have received a copy of the GNU General Public License along
28# with this program; if not, write to the Free Software Foundation, Inc.,
29# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
30
31
32import os
33import logging
34from collections import defaultdict
35import bb.utils
36
37logger = logging.getLogger("BitBake.Cache")
38
39try:
40 import cPickle as pickle
41except ImportError:
42 import pickle
43 logger.info("Importing cPickle failed. "
44 "Falling back to a very slow implementation.")
45
46__cache_version__ = "147"
47
def getCacheFile(path, filename, data_hash):
    """Return the cache file path for filename, suffixed with the
    configuration data hash."""
    return os.path.join(path, "%s.%s" % (filename, data_hash))
50
51# RecipeInfoCommon defines common data retrieving methods
52# from meta data for caches. CoreRecipeInfo as well as other
53# Extra RecipeInfo needs to inherit this class
class RecipeInfoCommon(object):
    """Base class providing helpers to pull values out of recipe metadata.

    CoreRecipeInfo as well as any extra cache RecipeInfo classes
    inherit from this class.
    """

    @classmethod
    def listvar(cls, var, metadata):
        # Whitespace-split list variable (missing variable -> []).
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        # Integer variable (missing/empty variable -> 0).
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        # Dependency string exploded into a list of dependency names.
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        # Per-package dependency values, e.g. RDEPENDS_<pkg>.
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        # Per-task values, looked up as <var>_task-<task>.
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        # Map each variable in varlist to its value for the given flag.
        # With squash=True, entries with a false value are dropped.
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                    for var in varlist)
        if squash:
            # items() instead of iteritems(): identical here and also
            # works on Python 3.
            return dict((k,v) for (k,v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
        # Expanded variable value, with None normalised to ''.
        return metadata.getVar(var, True) or ''
90
91
class CoreRecipeInfo(RecipeInfoCommon):
    """Core per-recipe information cached between parses.

    Captures the metadata bitbake's scheduler needs (provides, depends,
    tasks, stamps, hashes) so a recipe need not be re-parsed while the
    cache remains valid.
    """
    __slots__ = ()

    # On-disk cache file used for this info class.
    cachefile = "bb_cache.dat"

    def __init__(self, filename, metadata):
        self.file_depends = metadata.getVar('__depends', False)
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
            # Skipped recipes only record enough information to report
            # what they would have provided.
            self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
            self.skipped = True
            self.provides = self.depvar('PROVIDES', metadata)
            self.rprovides = self.depvar('RPROVIDES', metadata)
            return

        self.tasks = metadata.getVar('__BBTASKS', False)

        self.pn = self.getvar('PN', metadata)
        self.packages = self.listvar('PACKAGES', metadata)
        # PN is always treated as one of its own packages.
        if self.pn not in self.packages:
            self.packages.append(self.pn)

        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
        self.pe = self.getvar('PE', metadata)
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
        self.rdepends = self.depvar('RDEPENDS', metadata)
        self.rprovides = self.depvar('RPROVIDES', metadata)
        self.rrecommends = self.depvar('RRECOMMENDS', metadata)
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
        """Install the per-file lookup structures this class populates
        onto the given CacheData instance."""
        # CacheData in Core RecipeInfo Class
        cachedata.task_deps = {}
        cachedata.pkg_fn = {}
        cachedata.pkg_pn = defaultdict(list)
        cachedata.pkg_pepvpr = {}
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []

        cachedata.deps = defaultdict(list)
        cachedata.packages = defaultdict(list)
        cachedata.providers = defaultdict(list)
        cachedata.rproviders = defaultdict(list)
        cachedata.packages_dynamic = defaultdict(list)

        cachedata.rundeps = defaultdict(lambda: defaultdict(list))
        cachedata.runrecs = defaultdict(lambda: defaultdict(list))
        cachedata.possible_world = []
        cachedata.universe_target = []
        cachedata.hashfn = {}

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}

    def add_cacheData(self, cachedata, fn):
        """Merge this recipe's information into cachedata under key fn."""
        cachedata.task_deps[fn] = self.task_deps
        cachedata.pkg_fn[fn] = self.pn
        cachedata.pkg_pn[self.pn].append(fn)
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
            if provide not in provides:
                provides.append(provide)
        cachedata.fn_provides[fn] = provides

        for provide in provides:
            cachedata.providers[provide].append(fn)
            if provide not in cachedata.pn_provides[self.pn]:
                cachedata.pn_provides[self.pn].append(provide)

        for dep in self.depends:
            if dep not in cachedata.deps[fn]:
                cachedata.deps[fn].append(dep)
            if dep not in cachedata.all_depends:
                cachedata.all_depends.append(dep)

        rprovides = self.rprovides
        for package in self.packages:
            cachedata.packages[package].append(fn)
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if not self.not_world:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
        # tasks, such as upstream versions, license, and tools for
        # task and image creation.
        cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        # items() instead of iteritems(): identical here and also works
        # on Python 3.
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
252
253
254
class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
        # It will be used in later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
        # Filenames whose cache entries are known valid / already checked.
        self.clean = set()
        self.checked = set()
        # Maps (virtual) filename -> list of RecipeInfo instances.
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        # True while nothing new has been parsed; sync() skips saving then.
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # The cache is only loadable when every registered RecipeInfo
        # class has an existing cache file for the current data hash.
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        """Load all registered cache files into self.depends_cache,
        firing CacheLoad* progress events as data is read."""
        # Firstly, using core cache file information for
        # valid checking
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return


        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                # NOTE(review): 'cachefile' (a path string) is shadowed by
                # the open file object inside the with-block.
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    # NOTE(review): a file object is always truthy, so this
                    # loop only terminates via the 'break' below when
                    # unpickling runs off the end of the file.
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        # NOTE(review): dict.has_key() is Python 2 only;
                        # 'key in self.depends_cache' is the portable form.
                        if self.depends_cache.has_key(key):
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on even percentage boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                    # NOTE(review): if the loop breaks on its very first
                    # iteration (empty cache file), current_progress is
                    # unbound here and this raises NameError - confirm.
                    previous_progress += current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
                                                  len(self.depends_cache)),
                      self.data)


    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        # NOTE(review): iteritems() is Python 2 only.
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            # The base (unnamed) variant, sorted last, accumulates the
            # dependencies of all variants.
            if depends and not variant:
                data.setVar("__depends", depends)

            info_array = []
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

        Note that if it does parse to obtain the info, it will not
        automatically add the information to the cache or to your
        CacheData. Use the add or add_info method to do so after
        running this, or use loadData instead."""
        cached = self.cacheValid(filename, appends)
        if cached:
            infos = []
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            # NOTE(review): this branch returns the bare infos list while
            # the cached path below returns a (cached, infos) tuple;
            # callers that unpack two values (see loadData) appear to rely
            # on this inconsistency - confirm intent.
            return self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
                skipped += 1
            else:
                self.add_info(virtualfn, info_array, cacheData, not cached)
                virtuals += 1

        return cached, skipped, virtuals

    def cacheValid(self, fn, appends):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        if fn not in self.checked:
            self.cacheValidUpdate(fn, appends)

        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn, appends):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info_array = self.depends_cache[fn]
        # Check the file's timestamp
        if mtime != info_array[0].timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = info_array[0].file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                for f in fl.split():
                    # Wildcard entries cannot be existence-checked here.
                    if not ('*' in f or os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s was removed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        self.clean.add(fn)
        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        if fn in self.depends_cache:
            logger.debug(1, "Removing %s from cache", fn)
            del self.depends_cache[fn]
        if fn in self.clean:
            logger.debug(1, "Marking %s as unclean", fn)
            self.clean.remove(fn)

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        # One output file and pickler per registered RecipeInfo class.
        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        # The version stamps only go into the core cache file; they are
        # what load_cachefile() validates first.
        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

        try:
            # NOTE(review): iteritems() is Python 2 only.
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

    @staticmethod
    def mtime(cachefile):
        # Cached mtime lookup; returns 0 on error.
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None):
        """Record parsed recipe info in cacheData and, when cacheable,
        in the in-memory depends_cache for later saving by sync()."""
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

        if not self.has_cache:
            return

        if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
            if parsed:
                # Something new was parsed, so the on-disk cache is stale.
                self.cacheclean = False
            self.depends_cache[filename] = info_array

    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """

        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        # The bare except is tolerable only because the exception is
        # re-raised after restoring the working directory.
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise
681
682
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

    Returns a Cache built from the cooker's configuration datastore and
    its configuration hash.
    """
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
701
702
class CacheData(object):
    """
    Aggregated, query-friendly view of the information compiled from the
    per-recipe cache entries.
    """

    def __init__(self, caches_array):
        self.caches_array = caches_array
        # Let every registered RecipeInfo class attach its own lookup dicts
        for info_class in self.caches_array:
            if type(info_class) is type and issubclass(info_class, RecipeInfoCommon):
                info_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
        self.preferred = {}
        self.tasks = {}
        # Indirect Cache variables (set elsewhere)
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}

    def add_from_recipeinfo(self, fn, info_array):
        # Each info object knows how to merge itself into this structure
        for recipe_info in info_array:
            recipe_info.add_cacheData(self, fn)
726
class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches

    Subclasses provide the cache_file_name and CACHE_VERSION class
    attributes. The cache payload is a list of dicts (see
    create_cachedata()). Each worker process accumulates new entries in
    cachedata_extras and writes them to a per-process side file
    (save_extras); the main process later folds all side files back into
    the main cache file (save_merge).
    """

    def __init__(self):
        # Path of the persistent cache file; None until init_cache() runs
        self.cachefile = None
        # Data loaded from disk
        self.cachedata = self.create_cachedata()
        # New entries accumulated by this process only
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        """Locate and load the persistent cache file, if configured."""
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            # NOTE(review): bare except deliberately treats a missing or
            # unreadable cache file as "start with an empty cache"
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        # Discard caches written by an incompatible code version
        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def internSet(self, items):
        """Return a set of interned copies of the given strings, so equal
        strings share one object (saves memory, speeds comparisons)."""
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        """Return an empty cache structure; subclasses may use more slots."""
        data = [{}]
        return data

    def save_extras(self, d):
        """Write this process's new entries to a unique side file
        (cachefile-<n>), guarded by a shared lock on the main cache."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        # Find a free slot: claim cachefile.lock.<i> and require that
        # cachefile-<i> does not exist yet, bumping i until both hold.
        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        """Fold entries from source into dest, keeping existing dest entries
        on key collisions. Subclasses may override with a smarter policy."""
        for j in range(0,len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self, d):
        """Merge every side file produced by save_extras() into the main
        cache file, deleting the side files as they are consumed."""
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        # Version mismatch (or unreadable file) -> start from empty data
        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        # Pick up all cachefile-<n> side files left by worker processes
        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                extradata, version = self.create_cachedata(), None

            # Skip (but keep) side files from an incompatible version
            if version != self.__class__.CACHE_VERSION:
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)
847
diff --git a/bitbake/lib/bb/cache_extra.py b/bitbake/lib/bb/cache_extra.py
new file mode 100644
index 0000000..83f4959
--- /dev/null
+++ b/bitbake/lib/bb/cache_extra.py
@@ -0,0 +1,75 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Extra RecipeInfo will be all defined in this file. Currently,
5# Only Hob (Image Creator) Requests some extra fields. So
6# HobRecipeInfo is defined. It's named HobRecipeInfo because it
7# is introduced by 'hob'. Users could also introduce other
8# RecipeInfo or simply use those already defined RecipeInfo.
9# In the following patch, this newly defined new extra RecipeInfo
10# will be dynamically loaded and used for loading/saving the extra
11# cache fields
12
13# Copyright (C) 2011, Intel Corporation. All rights reserved.
14
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28from bb.cache import RecipeInfoCommon
29
class HobRecipeInfo(RecipeInfoCommon):
    """Extra per-recipe fields requested by Hob (the image creator UI)."""
    __slots__ = ()

    classname = "HobRecipeInfo"
    # please override this member with the correct data cache file
    # such as (bb_cache.dat, bb_extracache_hob.dat)
    cachefile = "bb_extracache_" + classname + ".dat"

    # override this member with the list of extra cache fields
    # that this class will provide
    cachefields = ['summary', 'license', 'section',
                   'description', 'homepage', 'bugtracker',
                   'prevision', 'files_info']

    def __init__(self, filename, metadata):
        # Each attribute is read from the corresponding metadata variable;
        # note 'prevision' comes from PR rather than a same-named variable.
        for field, var in (('summary', 'SUMMARY'),
                           ('license', 'LICENSE'),
                           ('section', 'SECTION'),
                           ('description', 'DESCRIPTION'),
                           ('homepage', 'HOMEPAGE'),
                           ('bugtracker', 'BUGTRACKER'),
                           ('prevision', 'PR'),
                           ('files_info', 'FILES_INFO')):
            setattr(self, field, self.getvar(var, metadata))

    @classmethod
    def init_cacheData(cls, cachedata):
        # CacheData in Hob RecipeInfo Class: one empty dict per extra field
        for field in cls.cachefields:
            setattr(cachedata, field, {})

    def add_cacheData(self, cachedata, fn):
        # Record this recipe's value for every extra field, keyed by filename
        for field in self.cachefields:
            getattr(cachedata, field)[fn] = getattr(self, field)
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
new file mode 100644
index 0000000..514ff0b
--- /dev/null
+++ b/bitbake/lib/bb/checksum.py
@@ -0,0 +1,90 @@
1# Local file checksum cache implementation
2#
3# Copyright (C) 2012 Intel Corporation
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as
7# published by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License along
15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
18import os
19import stat
20import bb.utils
21import logging
22from bb.cache import MultiProcessCache
23
24logger = logging.getLogger("BitBake.Cache")
25
26try:
27 import cPickle as pickle
28except ImportError:
29 import pickle
30 logger.info("Importing cPickle failed. "
31 "Falling back to a very slow implementation.")
32
33
34# mtime cache (non-persistent)
35# based upon the assumption that files do not change during bitbake run
# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
    """Caches os.stat() mtimes for the lifetime of the process."""
    # Shared across all instances on purpose (class attribute)
    cache = {}

    def cached_mtime(self, f):
        """Return the (cached) mtime of f; raises OSError if f is missing."""
        try:
            return self.cache[f]
        except KeyError:
            mtime = os.stat(f)[stat.ST_MTIME]
            self.cache[f] = mtime
            return mtime

    def cached_mtime_noerror(self, f):
        """Like cached_mtime(), but returns 0 instead of raising OSError."""
        mtime = self.cache.get(f)
        if mtime is None:
            try:
                mtime = os.stat(f)[stat.ST_MTIME]
            except OSError:
                return 0
            self.cache[f] = mtime
        return mtime

    def update_mtime(self, f):
        """Re-stat f unconditionally, refresh the cache, return the mtime."""
        mtime = os.stat(f)[stat.ST_MTIME]
        self.cache[f] = mtime
        return mtime

    def clear(self):
        """Drop every cached entry."""
        self.cache.clear()
58
59# Checksum + mtime cache (persistent)
# Checksum + mtime cache (persistent)
class FileChecksumCache(MultiProcessCache):
    """Persistent md5-checksum cache for local files, invalidated by mtime.

    Cache slot 0 maps filename -> (mtime, checksum).
    """
    cache_file_name = "local_file_checksum_cache.dat"
    CACHE_VERSION = 1

    def __init__(self):
        self.mtime_cache = FileMtimeCache()
        MultiProcessCache.__init__(self)

    def get_checksum(self, f):
        """Return the md5 checksum of f, recomputing it only when the
        file's current mtime differs from the cached one."""
        entry = self.cachedata[0].get(f)
        cmtime = self.mtime_cache.cached_mtime(f)
        if entry:
            (mtime, hashval) = entry
            if cmtime == mtime:
                return hashval
            else:
                bb.debug(2, "file %s changed mtime, recompute checksum" % f)

        hashval = bb.utils.md5_file(f)
        # New/updated entries go into the extras dict for later merging
        self.cachedata_extras[0][f] = (cmtime, hashval)
        return hashval

    def merge_data(self, source, dest):
        """Merge source into dest, keeping the entry with the newer mtime
        when both caches know the same file."""
        for h in source[0]:
            # BUGFIX: was "if h in dest" — dest is a *list* of dicts, so the
            # membership test was always False and every entry fell through
            # to the else branch, clobbering newer dest entries.
            if h in dest[0]:
                (smtime, _) = source[0][h]
                (dmtime, _) = dest[0][h]
                if smtime > dmtime:
                    dest[0][h] = source[0][h]
            else:
                dest[0][h] = source[0][h]
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
new file mode 100644
index 0000000..a50b9f2
--- /dev/null
+++ b/bitbake/lib/bb/codeparser.py
@@ -0,0 +1,328 @@
1import ast
2import codegen
3import logging
4import os.path
5import bb.utils, bb.data
6from itertools import chain
7from pysh import pyshyacc, pyshlex, sherrors
8from bb.cache import MultiProcessCache
9
10
11logger = logging.getLogger('BitBake.CodeParser')
12
13try:
14 import cPickle as pickle
15except ImportError:
16 import pickle
17 logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
18
19
def check_indent(codestr):
    """If the code is indented, add a top level piece of code to 'remove' the indentation

    Leading newlines are allowed; if the first non-newline character is a
    tab or space the whole fragment is wrapped in "if 1:\n" so that an
    indented body still compiles. Empty and all-whitespace input is
    returned unchanged (the original code raised IndexError on it).
    """

    i = 0
    # Bound the scan so empty / all-whitespace strings do not index past the end
    while i < len(codestr) and codestr[i] in ["\n", "\t", " "]:
        i = i + 1

    if i == 0:
        return codestr

    if codestr[i-1] == "\t" or codestr[i-1] == " ":
        return "if 1:\n" + codestr

    return codestr
34
35
class CodeParserCache(MultiProcessCache):
    """Persistent cache of python/shell parse results keyed by code hash.

    Slot 0 of the cache data holds python-function results, slot 1 holds
    shell-function results; the *cache / *cacheextras attributes are just
    aliases into those slots.
    """
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 4

    def __init__(self):
        MultiProcessCache.__init__(self)
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

    def init_cache(self, d):
        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent, so refresh the aliases
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def compress_keys(self, data):
        # When the dicts are originally created, python calls intern() on the set keys
        # which significantly improves memory usage. Sadly the pickle/unpickle process
        # doesn't call intern() on the keys and results in the same strings being duplicated
        # in memory. This also means pickle will save the same string multiple times in
        # the cache file. By interning the data here, the cache file shrinks dramatically
        # meaning faster load times and the reloaded cache files also consume much less
        # memory. This is worth any performance hit from this loops and the use of the
        # intern() data storage.
        # Python 3.x may behave better in this area
        pycache, shcache = data[0], data[1]
        for codehash in pycache:
            entry = pycache[codehash]
            entry["refs"] = self.internSet(entry["refs"])
            entry["execs"] = self.internSet(entry["execs"])
            for varname in entry["contains"]:
                entry["contains"][varname] = self.internSet(entry["contains"][varname])
        for codehash in shcache:
            shcache[codehash]["execs"] = self.internSet(shcache[codehash]["execs"])
        return

    def create_cachedata(self):
        # Two slots: [python results, shell results]
        return [{}, {}]
76
# Module-level singleton shared by all parsing code in this process
codeparsercache = CodeParserCache()

def parser_cache_init(d):
    """Load the persistent code parser cache using config from datastore d."""
    codeparsercache.init_cache(d)

def parser_cache_save(d):
    """Write this process's new parse results to a per-process side file."""
    codeparsercache.save_extras(d)

def parser_cache_savemerge(d):
    """Merge all side files back into the main code parser cache file."""
    codeparsercache.save_merge(d)
87
Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
    """A logger that queues its records in memory and only forwards them
    to a target logger when flush() is called."""

    def __init__(self, name, level=0, target=None):
        Logger.__init__(self, name)
        self.setLevel(level)
        self.buffer = []
        self.target = target

    def handle(self, record):
        # Queue the record instead of emitting it immediately
        self.buffer.append(record)

    def flush(self):
        # Replay everything queued so far through the target, then drop it
        for queued in self.buffer:
            self.target.handle(queued)
        self.buffer = []
103
class PythonParser():
    """Extracts variable references and executed function names from a
    fragment of python code by walking its AST (never executing it).

    Results land in self.references (variables read), self.execs
    (functions executed) and self.contains (variable -> set of values
    tested via contains-style helpers).
    """
    # Call names whose first argument names a datastore variable
    getvars = ("d.getVar", "bb.data.getVar", "data.getVar", "d.appendVar", "d.prependVar")
    # Call names that test whether a variable contains a value
    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    # Call names whose first argument names a function that gets executed
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
        """Warn about calls of bitbake APIs which pass a non-literal
        argument for the variable name, as we're not able to track such
        a reference.
        """

        try:
            funcstr = codegen.to_source(func)
            argstr = codegen.to_source(arg)
        except TypeError:
            self.log.debug(2, 'Failed to convert function and argument to source form')
        else:
            self.log.debug(1, self.unhandled_message % (funcstr, argstr))

    def visit_Call(self, node):
        """Classify one Call node into references/contains/var_execs/execs."""
        name = self.called_node_name(node.func)
        if name in self.getvars or name in self.containsfuncs:
            if isinstance(node.args[0], ast.Str):
                varname = node.args[0].s
                if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname].add(node.args[1].s)
                else:
                    self.references.add(node.args[0].s)
            else:
                # Variable name isn't a string literal — can't track it
                self.warn(node.func, node.args[0])
        elif name in self.execfuncs:
            if isinstance(node.args[0], ast.Str):
                self.var_execs.add(node.args[0].s)
            else:
                self.warn(node.func, node.args[0])
        elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
            # Any other direct call counts as an executed function
            self.execs.add(name)

    def called_node_name(self, node):
        """Given a called node, return its original string form"""
        components = []
        # Walk down Attribute chains (a.b.c) collecting the parts in reverse
        while node:
            if isinstance(node, ast.Attribute):
                components.append(node.attr)
                node = node.value
            elif isinstance(node, ast.Name):
                components.append(node.id)
                return '.'.join(reversed(components))
            else:
                # Not a plain dotted name (e.g. a call or subscript) — give up
                break

    def __init__(self, name, log):
        self.var_execs = set()
        self.contains = {}
        self.execs = set()
        self.references = set()
        # Buffer warnings; they're only emitted if the caller flushes them
        self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)

        self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

    def parse_python(self, node):
        """Parse the code string `node`, consulting (and populating) the
        persistent code parser cache keyed by the code's hash."""
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = codeparsercache.pythoncache[h]["refs"]
            self.execs = codeparsercache.pythoncache[h]["execs"]
            self.contains = codeparsercache.pythoncache[h]["contains"]
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = codeparsercache.pythoncacheextras[h]["refs"]
            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
            self.contains = codeparsercache.pythoncacheextras[h]["contains"]
            return

        # Not cached: compile to an AST only (no execution) and walk it
        code = compile(check_indent(str(node)), "<string>", "exec",
                       ast.PyCF_ONLY_AST)

        for n in ast.walk(code):
            if n.__class__.__name__ == "Call":
                self.visit_Call(n)

        self.execs.update(self.var_execs)

        # Record the fresh result in this process's extras for later merging
        codeparsercache.pythoncacheextras[h] = {}
        codeparsercache.pythoncacheextras[h]["refs"] = self.references
        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
        codeparsercache.pythoncacheextras[h]["contains"] = self.contains
195
class ShellParser():
    """Determines the set of external commands executed by a fragment of
    shell code, using the pysh parser (no execution)."""

    def __init__(self, name, log):
        # Functions defined within the fragment (not external commands)
        self.funcdefs = set()
        # Every command word seen, including shell functions
        self.allexecs = set()
        # Final result: allexecs minus funcdefs
        self.execs = set()
        self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
        self.unhandled_template = "unable to handle non-literal command '%s'"
        self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)

    def parse_shell(self, value):
        """Parse the supplied shell code in a string, returning the external
        commands it executes.
        """

        # Consult (and populate) the persistent cache keyed by the code hash
        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = codeparsercache.shellcache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = codeparsercache.shellcacheextras[h]["execs"]
            return self.execs

        try:
            tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
        except pyshlex.NeedMore:
            raise sherrors.ShellSyntaxError("Unexpected EOF")

        for token in tokens:
            self.process_tokens(token)
        # Shell functions defined locally are not external commands
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = {}
        codeparsercache.shellcacheextras[h]["execs"] = self.execs

        return self.execs

    def process_tokens(self, tokens):
        """Process a supplied portion of the syntax tree as returned by
        pyshyacc.parse.
        """

        def function_definition(value):
            self.funcdefs.add(value.name)
            return [value.body], None

        def case_clause(value):
            # Element 0 of each item in the case is the list of patterns, and
            # Element 1 of each item in the case is the list of commands to be
            # executed when that pattern matches.
            words = chain(*[item[0] for item in value.items])
            cmds = chain(*[item[1] for item in value.items])
            return cmds, words

        def if_clause(value):
            main = chain(value.cond, value.if_cmds)
            rest = value.else_cmds
            if isinstance(rest, tuple) and rest[0] == "elif":
                # elif chains nest recursively in the else slot
                return chain(main, if_clause(rest[1]))
            else:
                return chain(main, rest)

        def simple_command(value):
            return None, chain(value.words, (assign[1] for assign in value.assigns))

        # Each handler maps a token to (nested tokens, command words);
        # either element may be None.
        token_handlers = {
            "and_or": lambda x: ((x.left, x.right), None),
            "async": lambda x: ([x], None),
            "brace_group": lambda x: (x.cmds, None),
            "for_clause": lambda x: (x.cmds, x.items),
            "function_definition": function_definition,
            "if_clause": lambda x: (if_clause(x), None),
            "pipeline": lambda x: (x.commands, None),
            "redirect_list": lambda x: ([x.cmd], None),
            "subshell": lambda x: (x.cmds, None),
            "while_clause": lambda x: (chain(x.condition, x.cmds), None),
            "until_clause": lambda x: (chain(x.condition, x.cmds), None),
            "simple_command": simple_command,
            "case_clause": case_clause,
        }

        for token in tokens:
            name, value = token
            try:
                more_tokens, words = token_handlers[name](value)
            except KeyError:
                raise NotImplementedError("Unsupported token type " + name)

            if more_tokens:
                self.process_tokens(more_tokens)

            if words:
                self.process_words(words)

    def process_words(self, words):
        """Process a set of 'words' in pyshyacc parlance, which includes
        extraction of executed commands from $() blocks, as well as grabbing
        the command name argument.
        """

        words = list(words)
        for word in list(words):
            wtree = pyshlex.make_wordtree(word[1])
            for part in wtree:
                if not isinstance(part, list):
                    continue

                # Recurse into command substitutions: `...` and $(...)
                if part[0] in ('`', '$('):
                    command = pyshlex.wordtree_as_string(part[1:-1])
                    self.parse_shell(command)

                    if word[0] in ("cmd_name", "cmd_word"):
                        if word in words:
                            words.remove(word)

        usetoken = False
        for word in words:
            if word[0] in ("cmd_name", "cmd_word") or \
               (usetoken and word[0] == "TOKEN"):
                # A leading VAR=value assignment means the real command is a
                # later TOKEN word
                if "=" in word[1]:
                    usetoken = True
                    continue

                cmd = word[1]
                if cmd.startswith("$"):
                    # Command name comes from a variable — can't resolve it
                    self.log.debug(1, self.unhandled_template % cmd)
                elif cmd == "eval":
                    command = " ".join(word for _, word in words[1:])
                    self.parse_shell(command)
                else:
                    self.allexecs.add(cmd)
                break
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
new file mode 100644
index 0000000..84fcdf9
--- /dev/null
+++ b/bitbake/lib/bb/command.py
@@ -0,0 +1,444 @@
1"""
2BitBake 'Command' module
3
4Provide an interface to interact with the bitbake server through 'commands'
5"""
6
7# Copyright (C) 2006-2007 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22"""
23The bitbake server takes 'commands' from its UI/commandline.
24Commands are either synchronous or asynchronous.
25Async commands return data to the client in the form of events.
26Sync commands must only return data through the function return value
27and must not trigger events, directly or indirectly.
28Commands are queued in a CommandQueue
29"""
30
31import bb.event
32import bb.cooker
33
class CommandCompleted(bb.event.Event):
    """Event fired when an asynchronous command finishes successfully."""
    pass
36
class CommandExit(bb.event.Event):
    """Event fired when an asynchronous command exits with a given code."""
    def __init__(self, exitcode):
        bb.event.Event.__init__(self)
        self.exitcode = int(exitcode)
41
class CommandFailed(CommandExit):
    """Event fired when an asynchronous command fails; carries the error
    message and always uses exit code 1."""
    def __init__(self, message):
        self.error = message
        CommandExit.__init__(self, 1)
46
class CommandError(Exception):
    """Raised by synchronous commands; args[0] is returned to the caller
    as the error string (see Command.runCommand)."""
    pass
49
class Command:
    """
    A queue of asynchronous commands for bitbake

    Dispatches incoming command lines either to CommandsSync (run
    immediately, result returned directly) or to CommandsAsync (run later
    from the server idle callback, results delivered via events).
    """
    def __init__(self, cooker):
        self.cooker = cooker
        self.cmds_sync = CommandsSync()
        self.cmds_async = CommandsAsync()

        # FIXME Add lock for this
        # (command, options) tuple while an async command is in flight
        self.currentAsyncCommand = None

    def runCommand(self, commandline, ro_only = False):
        """Dispatch one command line; returns (result, error) where exactly
        one of the two is meaningful. For async commands the result is just
        True (accepted) and the real outcome arrives as events."""
        command = commandline.pop(0)
        if hasattr(CommandsSync, command):
            # Can run synchronous commands straight away
            command_method = getattr(self.cmds_sync, command)
            if ro_only:
                # In readonly mode, only commands marked readonly may run
                if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
                    return None, "Not able to execute not readonly commands in readonly mode"
            try:
                # Note: the bound method receives this Command object as its
                # 'command' parameter and the remaining args as 'params'
                result = command_method(self, commandline)
            except CommandError as exc:
                return None, exc.args[0]
            except Exception:
                import traceback
                return None, traceback.format_exc()
            else:
                return result, None
        # Only one async command may be in flight at a time
        if self.currentAsyncCommand is not None:
            return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
        if command not in CommandsAsync.__dict__:
            return None, "No such command"
        self.currentAsyncCommand = (command, commandline)
        # The server will call runCommands from its idle loop
        self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
        return True, None

    def runAsyncCommand(self):
        """Run (or continue) the current async command from the idle loop.
        Returns True if it should be called again, False when done."""
        try:
            if self.cooker.state == bb.cooker.state.error:
                return False
            if self.currentAsyncCommand is not None:
                (command, options) = self.currentAsyncCommand
                commandmethod = getattr(CommandsAsync, command)
                needcache = getattr( commandmethod, "needcache" )
                if needcache and self.cooker.state != bb.cooker.state.running:
                    # Keep parsing until the cache is ready, then come back
                    self.cooker.updateCache()
                    return True
                else:
                    commandmethod(self.cmds_async, self, options)
                    return False
            else:
                return False
        except KeyboardInterrupt as exc:
            self.finishAsyncCommand("Interrupted")
            return False
        except SystemExit as exc:
            arg = exc.args[0]
            if isinstance(arg, basestring):
                self.finishAsyncCommand(arg)
            else:
                self.finishAsyncCommand("Exited with %s" % arg)
            return False
        except Exception as exc:
            import traceback
            if isinstance(exc, bb.BBHandledException):
                # Error already reported elsewhere; fail with empty message
                self.finishAsyncCommand("")
            else:
                self.finishAsyncCommand(traceback.format_exc())
            return False

    def finishAsyncCommand(self, msg=None, code=None):
        """Report completion of the current async command via an event:
        CommandFailed (msg, including the empty string), CommandExit (code)
        or CommandCompleted."""
        if msg or msg == "":
            bb.event.fire(CommandFailed(msg), self.cooker.event_data)
        elif code:
            bb.event.fire(CommandExit(code), self.cooker.event_data)
        else:
            bb.event.fire(CommandCompleted(), self.cooker.event_data)
        self.currentAsyncCommand = None
        self.cooker.finishcommand()
130
class CommandsSync:
    """
    A class of synchronous commands
    These should run quickly so as not to hurt interactive performance.
    These must not influence any running synchronous command.

    Each method receives the dispatching Command object and the list of
    remaining command-line parameters. Methods safe to run in readonly
    mode carry a 'readonly = True' function attribute.
    """

    def stateShutdown(self, command, params):
        """
        Trigger cooker 'shutdown' mode
        """
        command.cooker.shutdown(False)

    def stateForceShutdown(self, command, params):
        """
        Stop the cooker
        """
        command.cooker.shutdown(True)

    def getAllKeysWithFlags(self, command, params):
        """
        Returns a dump of the global state. Call with
        variable flags to be retrieved as params.
        """
        flaglist = params[0]
        return command.cooker.getAllKeysWithFlags(flaglist)
    getAllKeysWithFlags.readonly = True

    def getVariable(self, command, params):
        """
        Read the value of a variable from data
        """
        varname = params[0]
        expand = True
        if len(params) > 1:
            # Expansion flag arrives as the string "True"/"False"
            expand = (params[1] == "True")

        return command.cooker.data.getVar(varname, expand)
    getVariable.readonly = True

    def setVariable(self, command, params):
        """
        Set the value of variable in data
        """
        varname = params[0]
        value = str(params[1])
        command.cooker.data.setVar(varname, value)

    def setConfig(self, command, params):
        """
        Set the value of variable in configuration
        """
        varname = params[0]
        value = str(params[1])
        setattr(command.cooker.configuration, varname, value)

    def enableDataTracking(self, command, params):
        """
        Enable history tracking for variables
        """
        command.cooker.enableDataTracking()

    def disableDataTracking(self, command, params):
        """
        Disable history tracking for variables
        """
        command.cooker.disableDataTracking()

    def setPrePostConfFiles(self, command, params):
        """Set the space-separated lists of pre- and post-configuration
        files on the cooker configuration."""
        prefiles = params[0].split()
        postfiles = params[1].split()
        command.cooker.configuration.prefile = prefiles
        command.cooker.configuration.postfile = postfiles

    def getCpuCount(self, command, params):
        """
        Get the CPU count on the bitbake server
        """
        return bb.utils.cpu_count()
    getCpuCount.readonly = True

    def matchFile(self, command, params):
        """Find the recipe file matching the given pattern."""
        fMatch = params[0]
        return command.cooker.matchFile(fMatch)

    def generateNewImage(self, command, params):
        """Generate a new image recipe, optionally based on an existing one."""
        image = params[0]
        base_image = params[1]
        package_queue = params[2]
        timestamp = params[3]
        description = params[4]
        return command.cooker.generateNewImage(image, base_image,
                        package_queue, timestamp, description)

    def ensureDir(self, command, params):
        """Create the given directory (and parents) if it does not exist."""
        directory = params[0]
        bb.utils.mkdirhier(directory)

    def setVarFile(self, command, params):
        """
        Save a variable in a file; used for saving in a configuration file
        """
        var = params[0]
        val = params[1]
        default_file = params[2]
        op = params[3]
        command.cooker.modifyConfigurationVar(var, val, default_file, op)

    def removeVarFile(self, command, params):
        """
        Remove a variable declaration from a file
        """
        var = params[0]
        command.cooker.removeConfigurationVar(var)

    def createConfigFile(self, command, params):
        """
        Create an extra configuration file
        """
        name = params[0]
        command.cooker.createConfigFile(name)

    def setEventMask(self, command, params):
        """Configure which events/log levels a UI handler receives."""
        handlerNum = params[0]
        llevel = params[1]
        debug_domains = params[2]
        mask = params[3]
        return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)

    def setFeatures(self, command, params):
        """
        Set the cooker features to include the passed list of features
        """
        features = params[0]
        command.cooker.setFeatures(features)

    # although we change the internal state of the cooker, this is transparent since
    # we always take and leave the cooker in state.initial
    setFeatures.readonly = True
270
class CommandsAsync:
    """
    A class of asynchronous commands
    These functions communicate via generated events.
    Any function that requires metadata parsing should be here.

    Each method carries a 'needcache' function attribute; when True the
    dispatcher first brings the recipe cache up to date before running it.
    """

    def buildFile(self, command, params):
        """
        Build a single specified .bb file
        """
        bfile = params[0]
        task = params[1]

        command.cooker.buildFile(bfile, task)
    buildFile.needcache = False

    def buildTargets(self, command, params):
        """
        Build a set of targets
        """
        pkgs_to_build = params[0]
        task = params[1]

        command.cooker.buildTargets(pkgs_to_build, task)
    buildTargets.needcache = True

    def generateDepTreeEvent(self, command, params):
        """
        Generate an event containing the dependency information
        """
        pkgs_to_build = params[0]
        task = params[1]

        command.cooker.generateDepTreeEvent(pkgs_to_build, task)
        command.finishAsyncCommand()
    generateDepTreeEvent.needcache = True

    def generateDotGraph(self, command, params):
        """
        Dump dependency information to disk as .dot files
        """
        pkgs_to_build = params[0]
        task = params[1]

        command.cooker.generateDotGraphFiles(pkgs_to_build, task)
        command.finishAsyncCommand()
    generateDotGraph.needcache = True

    def generateTargetsTree(self, command, params):
        """
        Generate a tree of buildable targets.
        If klass is provided ensure all recipes that inherit the class are
        included in the package list.
        If pkg_list provided use that list (plus any extras brought in by
        klass) rather than generating a tree for all packages.
        """
        klass = params[0]
        pkg_list = params[1]

        command.cooker.generateTargetsTree(klass, pkg_list)
        command.finishAsyncCommand()
    generateTargetsTree.needcache = True

    def findCoreBaseFiles(self, command, params):
        """
        Find certain files in COREBASE directory. i.e. Layers
        """
        subdir = params[0]
        filename = params[1]

        command.cooker.findCoreBaseFiles(subdir, filename)
        command.finishAsyncCommand()
    findCoreBaseFiles.needcache = False

    def findConfigFiles(self, command, params):
        """
        Find config files which provide appropriate values
        for the passed configuration variable. i.e. MACHINE
        """
        varname = params[0]

        command.cooker.findConfigFiles(varname)
        command.finishAsyncCommand()
    findConfigFiles.needcache = False

    def findFilesMatchingInDir(self, command, params):
        """
        Find implementation files matching the specified pattern
        in the requested subdirectory of a BBPATH
        """
        pattern = params[0]
        directory = params[1]

        command.cooker.findFilesMatchingInDir(pattern, directory)
        command.finishAsyncCommand()
    findFilesMatchingInDir.needcache = False

    def findConfigFilePath(self, command, params):
        """
        Find the path of the requested configuration file
        """
        configfile = params[0]

        command.cooker.findConfigFilePath(configfile)
        command.finishAsyncCommand()
    findConfigFilePath.needcache = False

    def showVersions(self, command, params):
        """
        Show the currently selected versions
        """
        command.cooker.showVersions()
        command.finishAsyncCommand()
    showVersions.needcache = True

    def showEnvironmentTarget(self, command, params):
        """
        Print the environment of a target recipe
        (needs the cache to work out which recipe to use)
        """
        pkg = params[0]

        command.cooker.showEnvironment(None, pkg)
        command.finishAsyncCommand()
    showEnvironmentTarget.needcache = True

    def showEnvironment(self, command, params):
        """
        Print the standard environment
        or if specified the environment for a specified recipe
        """
        bfile = params[0]

        command.cooker.showEnvironment(bfile)
        command.finishAsyncCommand()
    showEnvironment.needcache = False

    def parseFiles(self, command, params):
        """
        Parse the .bb files
        """
        command.cooker.updateCache()
        command.finishAsyncCommand()
    parseFiles.needcache = True

    def compareRevisions(self, command, params):
        """
        Compare the stored and current source revisions, finishing with
        exit code 1 if they differ.
        (The original docstring, "Parse the .bb files", was copy-pasted
        from parseFiles and did not describe this command.)
        """
        if bb.fetch.fetcher_compare_revisions(command.cooker.data):
            command.finishAsyncCommand(code=1)
        else:
            command.finishAsyncCommand()
    compareRevisions.needcache = True

    def triggerEvent(self, command, params):
        """
        Trigger a certain event
        """
        event = params[0]
        # NOTE(review): eval() of a caller-supplied expression — acceptable
        # only because UIs connecting to the server are trusted; never expose
        # this to untrusted input.
        bb.event.fire(eval(event), command.cooker.data)
        command.currentAsyncCommand = None
    triggerEvent.needcache = False

    def resetCooker(self, command, params):
        """
        Reset the cooker to its initial state, thus forcing a reparse for
        any async command that has the needcache property set to True
        """
        command.cooker.reset()
        command.finishAsyncCommand()
    resetCooker.needcache = False
444
diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py
new file mode 100644
index 0000000..de1923d
--- /dev/null
+++ b/bitbake/lib/bb/compat.py
@@ -0,0 +1,6 @@
1"""Code pulled from future python versions, here for compatibility"""
2
3from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
4from functools import total_ordering
5
6
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
new file mode 100644
index 0000000..f44a088
--- /dev/null
+++ b/bitbake/lib/bb/cooker.py
@@ -0,0 +1,1874 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
39import Queue
40import signal
41import prserv.serv
42
# Module-level loggers: one parent "BitBake" logger plus per-subsystem
# children so messages can be filtered by area (collection, build,
# parsing, provider resolution).
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")
48
class NoSpecificMatch(bb.BBHandledException):
    """
    Exception raised when no file matches, or more than one file matches,
    so no specific file can be selected.
    """
53
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build.
    NOTE(review): unlike the sibling exceptions here this derives from
    plain Exception rather than bb.BBHandledException — confirm intent.
    """
58
class CollectionError(bb.BBHandledException):
    """
    Exception raised when the layer configuration is incorrect.
    """
63
class state:
    """Cooker run-state constants (consecutive small integers)."""
    initial = 0
    parsing = 1
    running = 2
    shutdown = 3
    forceshutdown = 4
    stopped = 5
    error = 6
66
67
class SkippedPackage:
    """Lightweight record of a recipe skipped during parsing.

    Populated either from a parsed recipe-info object (info) or from a
    bare textual reason; all fields default to None.
    """
    def __init__(self, info = None, reason = None):
        self.pn = info.pn if info else None
        self.provides = info.provides if info else None
        self.rprovides = info.rprovides if info else None
        if info:
            self.skipreason = info.skipreason
        else:
            self.skipreason = reason if reason else None
82
83
class CookerFeatures(object):
    """
    Set of optional cooker features requested by a UI.

    Feature identifiers are the small integer constants declared in
    _feature_list; requests for unknown identifiers are silently ignored.
    """
    # The unpacking makes _feature_list double as the list of valid ids.
    _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature f; unknown feature ids are ignored."""
        # validate we got a request for a feature we support
        if f not in CookerFeatures._feature_list:
            return
        self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return self._features.__iter__()

    def next(self):
        # Bug fix: the original returned self._features.next(), but a set is
        # not an iterator and has no next() method, so this always raised
        # AttributeError. Delegate through a fresh iterator instead; returns
        # an arbitrary enabled feature (legacy Python 2-style API).
        return next(iter(self._features))
104
105
106#============================================================================#
107# BBCooker
108#============================================================================#
109class BBCooker:
110 """
111 Manages one bitbake build run
112 """
113
114 def __init__(self, configuration, featureSet = []):
115 self.recipecache = None
116 self.skiplist = {}
117 self.featureset = CookerFeatures()
118 for f in featureSet:
119 self.featureset.setFeature(f)
120
121 self.configuration = configuration
122
123 self.initConfigurationData()
124
125 # Take a lock so only one copy of bitbake can run against a given build
126 # directory at a time
127 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
128 self.lock = bb.utils.lockfile(lockfile, False, False)
129 if not self.lock:
130 bb.fatal("Only one copy of bitbake should be run against a build directory")
131 try:
132 self.lock.seek(0)
133 self.lock.truncate()
134 if len(configuration.interface) >= 2:
135 self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
136 self.lock.flush()
137 except:
138 pass
139
140 # TOSTOP must not be set or our children will hang when they output
141 fd = sys.stdout.fileno()
142 if os.isatty(fd):
143 import termios
144 tcattr = termios.tcgetattr(fd)
145 if tcattr[3] & termios.TOSTOP:
146 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
147 tcattr[3] = tcattr[3] & ~termios.TOSTOP
148 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
149
150 self.command = bb.command.Command(self)
151 self.state = state.initial
152
153 self.parser = None
154
155 signal.signal(signal.SIGTERM, self.sigterm_exception)
156
157 def sigterm_exception(self, signum, stackframe):
158 bb.warn("Cooker recieved SIGTERM, shutting down...")
159 self.state = state.forceshutdown
160
161 def setFeatures(self, features):
162 # we only accept a new feature set if we're in state initial, so we can reset without problems
163 if self.state != state.initial:
164 raise Exception("Illegal state for feature set change")
165 original_featureset = list(self.featureset)
166 for feature in features:
167 self.featureset.setFeature(feature)
168 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
169 if (original_featureset != list(self.featureset)):
170 self.reset()
171
    def initConfigurationData(self):
        """(Re)build the base configuration datastore and cache class list.

        Resets the cooker state, selects which RecipeInfo cache classes to
        load based on the enabled features, parses the base configuration
        via CookerDataBuilder, and prepares a separate datastore copy used
        when firing events.
        """

        self.state = state.initial
        self.caches_array = []

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

        all_extra_cache_names = []
        # We hardcode all known cache types in a single place, here.
        if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
            all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")

        caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names

        # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
        # This is the entry point, no further check needed!
        for var in caches_name_array:
            try:
                # Each entry is "module:class"; import the module and pick
                # the class off it.
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError as exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
        self.databuilder.parseBaseConfiguration()
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        #
        # Special updated configuration we use for firing events
        #
        self.event_data = bb.data.createCopy(self.data)
        bb.data.update_data(self.event_data)
        bb.parse.init_parser(self.event_data)

        # Presumably tracking is only wanted while building the base
        # datastore, so it is switched off again here — TODO confirm.
        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()
212
213 def enableDataTracking(self):
214 self.configuration.tracking = True
215 if hasattr(self, "data"):
216 self.data.enableTracking()
217
218 def disableDataTracking(self):
219 self.configuration.tracking = False
220 if hasattr(self, "data"):
221 self.data.disableTracking()
222
223 def modifyConfigurationVar(self, var, val, default_file, op):
224 if op == "append":
225 self.appendConfigurationVar(var, val, default_file)
226 elif op == "set":
227 self.saveConfigurationVar(var, val, default_file, "=")
228 elif op == "earlyAssign":
229 self.saveConfigurationVar(var, val, default_file, "?=")
230
231
232 def appendConfigurationVar(self, var, val, default_file):
233 #add append var operation to the end of default_file
234 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
235
236 total = "#added by hob"
237 total += "\n%s += \"%s\"\n" % (var, val)
238
239 with open(default_file, 'a') as f:
240 f.write(total)
241
242 #add to history
243 loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
244 self.data.appendVar(var, val, **loginfo)
245
246 def saveConfigurationVar(self, var, val, default_file, op):
247
248 replaced = False
249 #do not save if nothing changed
250 if str(val) == self.data.getVar(var):
251 return
252
253 conf_files = self.data.varhistory.get_variable_files(var)
254
255 #format the value when it is a list
256 if isinstance(val, list):
257 listval = ""
258 for value in val:
259 listval += "%s " % value
260 val = listval
261
262 topdir = self.data.getVar("TOPDIR")
263
264 #comment or replace operations made on var
265 for conf_file in conf_files:
266 if topdir in conf_file:
267 with open(conf_file, 'r') as f:
268 contents = f.readlines()
269
270 lines = self.data.varhistory.get_variable_lines(var, conf_file)
271 for line in lines:
272 total = ""
273 i = 0
274 for c in contents:
275 total += c
276 i = i + 1
277 if i==int(line):
278 end_index = len(total)
279 index = total.rfind(var, 0, end_index)
280
281 begin_line = total.count("\n",0,index)
282 end_line = int(line)
283
284 #check if the variable was saved before in the same way
285 #if true it replace the place where the variable was declared
286 #else it comments it
287 if contents[begin_line-1]== "#added by hob\n":
288 contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
289 replaced = True
290 else:
291 for ii in range(begin_line, end_line):
292 contents[ii] = "#" + contents[ii]
293
294 with open(conf_file, 'w') as f:
295 f.writelines(contents)
296
297 if replaced == False:
298 #remove var from history
299 self.data.varhistory.del_var_history(var)
300
301 #add var to the end of default_file
302 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
303
304 #add the variable on a single line, to be easy to replace the second time
305 total = "\n#added by hob"
306 total += "\n%s %s \"%s\"\n" % (var, op, val)
307
308 with open(default_file, 'a') as f:
309 f.write(total)
310
311 #add to history
312 loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
313 self.data.setVar(var, val, **loginfo)
314
    def removeConfigurationVar(self, var):
        """Remove var from the on-disk configuration and the datastore.

        Walks every conf file under TOPDIR where the variable's history
        records an assignment, blanks those lines (including a preceding
        "#added by hob" marker when present), prunes the matching history
        entries and deletes the variable from the live datastore.
        """
        conf_files = self.data.varhistory.get_variable_files(var)
        topdir = self.data.getVar("TOPDIR")

        for conf_file in conf_files:
            if topdir in conf_file:
                with open(conf_file, 'r') as f:
                    contents = f.readlines()

                lines = self.data.varhistory.get_variable_lines(var, conf_file)
                for line in lines:
                    # Accumulate the file text up to the recorded line number
                    # so the assignment's position can be located.
                    total = ""
                    i = 0
                    for c in contents:
                        total += c
                        i = i + 1
                        if i==int(line):
                            end_index = len(total)
                            index = total.rfind(var, 0, end_index)

                            begin_line = total.count("\n",0,index)

                            #check if the variable was saved before in the same way
                            if contents[begin_line-1]== "#added by hob\n":
                                contents[begin_line-1] = contents[begin_line] = "\n"
                            else:
                                contents[begin_line] = "\n"
                            #remove var from history
                            self.data.varhistory.del_var_history(var, conf_file, line)
                            #remove variable
                            self.data.delVar(var)

                with open(conf_file, 'w') as f:
                    f.writelines(contents)
349
350 def createConfigFile(self, name):
351 path = os.getcwd()
352 confpath = os.path.join(path, "conf", name)
353 open(confpath, 'w').close()
354
    def parseConfiguration(self):
        """Apply post-parse configuration: log verbosity, nice level, a
        fresh recipe cache, and the BBFILE_COLLECTIONS handling."""
        # Set log file verbosity
        # NOTE(review): the second argument "0" lands in getVar()'s
        # expand-flag position and is truthy — confirm this is intentional.
        verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
        if verboselogs:
            bb.msg.loggerVerboseLogs = True

        # Change nice level if we're asked to
        nice = self.data.getVar("BB_NICE_LEVEL", True)
        if nice:
            curnice = os.nice(0)
            # os.nice() takes a delta, so subtract the current niceness.
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        # Discard any previous recipe cache and start a fresh one.
        if self.recipecache:
            del self.recipecache
        self.recipecache = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
373
374 def runCommands(self, server, data, abort):
375 """
376 Run any queued asynchronous command
377 This is done by the idle handler so it runs in true context rather than
378 tied to any UI.
379 """
380
381 return self.command.runAsyncCommand()
382
383 def showVersions(self):
384
385 pkg_pn = self.recipecache.pkg_pn
386 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
387
388 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
389 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
390
391 for p in sorted(pkg_pn):
392 pref = preferred_versions[p]
393 latest = latest_versions[p]
394
395 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
396 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
397
398 if pref == latest:
399 prefstr = ""
400
401 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
402
403 def showEnvironment(self, buildfile = None, pkgs_to_build = []):
404 """
405 Show the outer or per-package environment
406 """
407 fn = None
408 envdata = None
409
410 if buildfile:
411 # Parse the configuration here. We need to do it explicitly here since
412 # this showEnvironment() code path doesn't use the cache
413 self.parseConfiguration()
414
415 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
416 fn = self.matchFile(fn)
417 fn = bb.cache.Cache.realfn2virtual(fn, cls)
418 elif len(pkgs_to_build) == 1:
419 ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
420 if pkgs_to_build[0] in set(ignore.split()):
421 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
422
423 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)
424
425 targetid = taskdata.getbuild_id(pkgs_to_build[0])
426 fnid = taskdata.build_targets[targetid][0]
427 fn = taskdata.fn_index[fnid]
428 else:
429 envdata = self.data
430
431 if fn:
432 try:
433 envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
434 except Exception as e:
435 parselog.exception("Unable to read %s", fn)
436 raise
437
438 # Display history
439 with closing(StringIO()) as env:
440 self.data.inchistory.emit(env)
441 logger.plain(env.getvalue())
442
443 # emit variables and shell functions
444 data.update_data(envdata)
445 with closing(StringIO()) as env:
446 data.emit_env(env, envdata, True)
447 logger.plain(env.getvalue())
448
449 # emit the metadata which isnt valid shell
450 data.expandKeys(envdata)
451 for e in envdata.keys():
452 if data.getVarFlag( e, 'python', envdata ):
453 logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
454
455
    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build.

        pkgs_to_build -- list of target names, each optionally suffixed with
                         an explicit task as "name:do_<task>"
        task          -- default task name; None means configuration.cmd
        abort         -- passed to TaskData: abort on unresolvable targets

        Returns (taskdata, runlist, fulltargetlist). Progress is reported
        through TreeDataPreparation* events.
        """
        bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)

        # A task of None means use the default task
        if task is None:
            task = self.configuration.cmd

        fulltargetlist = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
        for k in fulltargetlist:
            ktask = task
            # Targets may carry a per-target task override, e.g. "foo:do_fetch".
            if ":do_" in k:
                k2 = k.split(":do_")
                k = k2[0]
                ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            runlist.append([k, "do_%s" % ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
        return taskdata, runlist, fulltargetlist
488
489 def prepareTreeData(self, pkgs_to_build, task):
490 """
491 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
492 """
493
494 # We set abort to False here to prevent unbuildable targets raising
495 # an exception when we're just generating data
496 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
497
498 return runlist, taskdata
499
500 ######## WARNING : this function requires cache_extra to be enabled ########
501
502 def generateTaskDepTreeData(self, pkgs_to_build, task):
503 """
504 Create a dependency graph of pkgs_to_build including reverse dependency
505 information.
506 """
507 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
508 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
509 rq.rqdata.prepare()
510 return self.buildDependTree(rq, taskdata)
511
512
    def buildDependTree(self, rq, taskdata):
        """Flatten a prepared runqueue into a dependency-tree dict.

        Returns a dict with keys "depends", "tdepends", "pn", "rdepends-pn",
        "packages", "rdepends-pkg", "rrecs-pkg" and "layer-priorities",
        suitable for DepTreeGenerated consumers and .dot file generation.
        """
        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}
        depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities

        # One pass per scheduled task: record its recipe ("pn") entry and
        # its task-level dependencies.
        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]
            version  = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
                depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

                # if we have extra caches, list all attributes they bring in
                extra_info = []
                for cache_class in self.caches_array:
                    if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                        cachefields = getattr(cache_class, 'cachefields', [])
                        extra_info = extra_info + cachefields

                # for all attributes stored, add them to the dependency tree
                for ei in extra_info:
                    depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]


            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.recipecache.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            # Per-recipe (rather than per-task) information is only gathered
            # the first time the recipe's fnid is seen.
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.recipecache.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.recipecache.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
591
592 ######## WARNING : this function requires cache_extra to be enabled ########
    def generatePkgDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.

        Unlike buildDependTree this works purely from taskdata (no prepared
        runqueue) and resolves DEPENDS/RDEPENDS entries to the recipe names
        providing them where possible.
        """
        _, taskdata = self.prepareTreeData(pkgs_to_build, task)
        tasks_fnid = []
        if len(taskdata.tasks_name) != 0:
            for task in xrange(len(taskdata.tasks_name)):
                tasks_fnid.append(taskdata.tasks_fnid[task])

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        # if we have extra caches, list all attributes they bring in
        extra_info = []
        for cache_class in self.caches_array:
            if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
                cachefields = getattr(cache_class, 'cachefields', [])
                extra_info = extra_info + cachefields

        for task in xrange(len(tasks_fnid)):
            fnid = tasks_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.recipecache.pkg_fn[fn]

            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
                depend_tree["pn"][pn]["version"] = version
            rdepends = self.recipecache.rundeps[fn]
            rrecs = self.recipecache.runrecs[fn]
            depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)

            # for all extra attributes stored, add them to the dependency tree
            for ei in extra_info:
                depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]

            # Per-recipe dependency resolution happens only on first sight.
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    item = taskdata.build_names_index[dep]
                    pn_provider = ""
                    targetid = taskdata.getbuild_id(item)
                    # Map the dependency to the providing recipe when known,
                    # otherwise fall back to the raw item name.
                    if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
                        # NOTE(review): "id" shadows the builtin; harmless
                        # here but worth renaming.
                        id = taskdata.build_targets[targetid][0]
                        fn_provider = taskdata.fn_index[id]
                        pn_provider = self.recipecache.pkg_fn[fn_provider]
                    else:
                        pn_provider = item
                    depend_tree["depends"][pn].append(pn_provider)

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    item = taskdata.run_names_index[rdep]
                    pn_rprovider = ""
                    targetid = taskdata.getrun_id(item)
                    if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
                        id = taskdata.run_targets[targetid][0]
                        fn_rprovider = taskdata.fn_index[id]
                        pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
                    else:
                        pn_rprovider = item
                    depend_tree["rdepends-pn"][pn].append(pn_rprovider)

                depend_tree["rdepends-pkg"].update(rdepends)
                depend_tree["rrecs-pkg"].update(rrecs)

        return depend_tree
669
670 def generateDepTreeEvent(self, pkgs_to_build, task):
671 """
672 Create a task dependency graph of pkgs_to_build.
673 Generate an event with the result
674 """
675 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
676 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
677
678 def generateDotGraphFiles(self, pkgs_to_build, task):
679 """
680 Create a task dependency graph of pkgs_to_build.
681 Save the result to a set of .dot files.
682 """
683
684 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
685
686 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
687 depends_file = file('pn-depends.dot', 'w' )
688 buildlist_file = file('pn-buildlist', 'w' )
689 print("digraph depends {", file=depends_file)
690 for pn in depgraph["pn"]:
691 fn = depgraph["pn"][pn]["filename"]
692 version = depgraph["pn"][pn]["version"]
693 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
694 print("%s" % pn, file=buildlist_file)
695 buildlist_file.close()
696 logger.info("PN build list saved to 'pn-buildlist'")
697 for pn in depgraph["depends"]:
698 for depend in depgraph["depends"][pn]:
699 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
700 for pn in depgraph["rdepends-pn"]:
701 for rdepend in depgraph["rdepends-pn"][pn]:
702 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
703 print("}", file=depends_file)
704 logger.info("PN dependencies saved to 'pn-depends.dot'")
705
706 depends_file = file('package-depends.dot', 'w' )
707 print("digraph depends {", file=depends_file)
708 for package in depgraph["packages"]:
709 pn = depgraph["packages"][package]["pn"]
710 fn = depgraph["packages"][package]["filename"]
711 version = depgraph["packages"][package]["version"]
712 if package == pn:
713 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
714 else:
715 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
716 for depend in depgraph["depends"][pn]:
717 print('"%s" -> "%s"' % (package, depend), file=depends_file)
718 for package in depgraph["rdepends-pkg"]:
719 for rdepend in depgraph["rdepends-pkg"][package]:
720 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
721 for package in depgraph["rrecs-pkg"]:
722 for rdepend in depgraph["rrecs-pkg"][package]:
723 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
724 print("}", file=depends_file)
725 logger.info("Package dependencies saved to 'package-depends.dot'")
726
727 tdepends_file = file('task-depends.dot', 'w' )
728 print("digraph depends {", file=tdepends_file)
729 for task in depgraph["tdepends"]:
730 (pn, taskname) = task.rsplit(".", 1)
731 fn = depgraph["pn"][pn]["filename"]
732 version = depgraph["pn"][pn]["version"]
733 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskn