path: root/bitbake/lib/bb
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r-- bitbake/lib/bb/COW.py | 323
-rw-r--r-- bitbake/lib/bb/__init__.py | 143
-rw-r--r-- bitbake/lib/bb/build.py | 709
-rw-r--r-- bitbake/lib/bb/cache.py | 847
-rw-r--r-- bitbake/lib/bb/cache_extra.py | 75
-rw-r--r-- bitbake/lib/bb/checksum.py | 90
-rw-r--r-- bitbake/lib/bb/codeparser.py | 328
-rw-r--r-- bitbake/lib/bb/command.py | 444
-rw-r--r-- bitbake/lib/bb/compat.py | 6
-rw-r--r-- bitbake/lib/bb/cooker.py | 1874
-rw-r--r-- bitbake/lib/bb/cookerdata.py | 305
-rw-r--r-- bitbake/lib/bb/daemonize.py | 190
-rw-r--r-- bitbake/lib/bb/data.py | 403
-rw-r--r-- bitbake/lib/bb/data_smart.py | 804
-rw-r--r-- bitbake/lib/bb/event.py | 641
-rw-r--r-- bitbake/lib/bb/exceptions.py | 91
-rw-r--r-- bitbake/lib/bb/fetch2/__init__.py | 1575
-rw-r--r-- bitbake/lib/bb/fetch2/bzr.py | 143
-rw-r--r-- bitbake/lib/bb/fetch2/cvs.py | 171
-rw-r--r-- bitbake/lib/bb/fetch2/git.py | 355
-rw-r--r-- bitbake/lib/bb/fetch2/gitannex.py | 76
-rw-r--r-- bitbake/lib/bb/fetch2/gitsm.py | 126
-rw-r--r-- bitbake/lib/bb/fetch2/hg.py | 187
-rw-r--r-- bitbake/lib/bb/fetch2/local.py | 116
-rw-r--r-- bitbake/lib/bb/fetch2/osc.py | 135
-rw-r--r-- bitbake/lib/bb/fetch2/perforce.py | 194
-rw-r--r-- bitbake/lib/bb/fetch2/repo.py | 98
-rw-r--r-- bitbake/lib/bb/fetch2/sftp.py | 129
-rw-r--r-- bitbake/lib/bb/fetch2/ssh.py | 127
-rw-r--r-- bitbake/lib/bb/fetch2/svn.py | 191
-rw-r--r-- bitbake/lib/bb/fetch2/wget.py | 106
-rw-r--r-- bitbake/lib/bb/methodpool.py | 29
-rw-r--r-- bitbake/lib/bb/monitordisk.py | 265
-rw-r--r-- bitbake/lib/bb/msg.py | 196
-rw-r--r-- bitbake/lib/bb/namedtuple_with_abc.py | 255
-rw-r--r-- bitbake/lib/bb/parse/__init__.py | 157
-rw-r--r-- bitbake/lib/bb/parse/ast.py | 478
-rw-r--r-- bitbake/lib/bb/parse/parse_py/BBHandler.py | 267
-rw-r--r-- bitbake/lib/bb/parse/parse_py/ConfHandler.py | 189
-rw-r--r-- bitbake/lib/bb/parse/parse_py/__init__.py | 33
-rw-r--r-- bitbake/lib/bb/persist_data.py | 215
-rw-r--r-- bitbake/lib/bb/process.py | 133
-rw-r--r-- bitbake/lib/bb/providers.py | 381
-rw-r--r-- bitbake/lib/bb/pysh/__init__.py | 0
-rw-r--r-- bitbake/lib/bb/pysh/builtin.py | 710
-rw-r--r-- bitbake/lib/bb/pysh/interp.py | 1367
-rw-r--r-- bitbake/lib/bb/pysh/lsprof.py | 116
-rw-r--r-- bitbake/lib/bb/pysh/pysh.py | 167
-rw-r--r-- bitbake/lib/bb/pysh/pyshlex.py | 888
-rw-r--r-- bitbake/lib/bb/pysh/pyshyacc.py | 779
-rw-r--r-- bitbake/lib/bb/pysh/sherrors.py | 41
-rw-r--r-- bitbake/lib/bb/pysh/subprocess_fix.py | 77
-rw-r--r-- bitbake/lib/bb/runqueue.py | 2154
-rw-r--r-- bitbake/lib/bb/server/__init__.py | 96
-rw-r--r-- bitbake/lib/bb/server/process.py | 236
-rw-r--r-- bitbake/lib/bb/server/xmlrpc.py | 392
-rw-r--r-- bitbake/lib/bb/shell.py | 820
-rw-r--r-- bitbake/lib/bb/siggen.py | 483
-rw-r--r-- bitbake/lib/bb/taskdata.py | 651
-rw-r--r-- bitbake/lib/bb/tests/__init__.py | 0
-rw-r--r-- bitbake/lib/bb/tests/codeparser.py | 375
-rw-r--r-- bitbake/lib/bb/tests/cow.py | 136
-rw-r--r-- bitbake/lib/bb/tests/data.py | 296
-rw-r--r-- bitbake/lib/bb/tests/fetch.py | 562
-rw-r--r-- bitbake/lib/bb/tests/utils.py | 53
-rw-r--r-- bitbake/lib/bb/tinfoil.py | 96
-rw-r--r-- bitbake/lib/bb/ui/__init__.py | 17
-rw-r--r-- bitbake/lib/bb/ui/buildinfohelper.py | 964
-rw-r--r-- bitbake/lib/bb/ui/crumbs/__init__.py | 17
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/builddetailspage.py | 437
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/builder.py | 1475
-rw-r--r-- bitbake/lib/bb/ui/crumbs/buildmanager.py | 455
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/__init__.py | 0
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py | 341
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py | 44
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py | 70
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py | 219
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py | 172
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py | 298
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py | 163
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/propertydialog.py | 437
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py | 90
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py | 51
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py | 159
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py | 122
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py | 894
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hobcolor.py | 38
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hobeventhandler.py | 639
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hoblistmodel.py | 903
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/hobpages.py | 128
-rw-r--r-- bitbake/lib/bb/ui/crumbs/hobwidget.py | 904
-rw-r--r-- bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py | 561
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/imagedetailspage.py | 669
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/packageselectionpage.py | 355
-rw-r--r-- bitbake/lib/bb/ui/crumbs/persistenttooltip.py | 186
-rw-r--r-- bitbake/lib/bb/ui/crumbs/progress.py | 23
-rw-r--r-- bitbake/lib/bb/ui/crumbs/progressbar.py | 59
-rw-r--r-- bitbake/lib/bb/ui/crumbs/puccho.glade | 606
-rwxr-xr-x bitbake/lib/bb/ui/crumbs/recipeselectionpage.py | 335
-rw-r--r-- bitbake/lib/bb/ui/crumbs/runningbuild.py | 551
-rw-r--r-- bitbake/lib/bb/ui/crumbs/sanitycheckpage.py | 85
-rw-r--r-- bitbake/lib/bb/ui/crumbs/utils.py | 34
-rw-r--r-- bitbake/lib/bb/ui/depexp.py | 326
-rw-r--r-- bitbake/lib/bb/ui/goggle.py | 121
-rwxr-xr-x bitbake/lib/bb/ui/hob.py | 109
-rw-r--r-- bitbake/lib/bb/ui/icons/images/images_display.png | bin 0 -> 6898 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/images/images_hover.png | bin 0 -> 7051 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/add-hover.png | bin 0 -> 1212 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/add.png | bin 0 -> 1176 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/alert.png | bin 0 -> 3954 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/confirmation.png | bin 0 -> 5789 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/denied.png | bin 0 -> 3955 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/error.png | bin 0 -> 6482 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/info.png | bin 0 -> 3311 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/issues.png | bin 0 -> 4549 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/refresh.png | bin 0 -> 5250 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/remove-hover.png | bin 0 -> 2809 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/remove.png | bin 0 -> 1971 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/indicators/tick.png | bin 0 -> 4563 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/info/info_display.png | bin 0 -> 4117 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/info/info_hover.png | bin 0 -> 4167 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/layers/layers_display.png | bin 0 -> 4840 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/layers/layers_hover.png | bin 0 -> 5257 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/packages/packages_display.png | bin 0 -> 7011 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/packages/packages_hover.png | bin 0 -> 7121 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/recipe/recipe_display.png | bin 0 -> 4723 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/recipe/recipe_hover.png | bin 0 -> 4866 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/settings/settings_display.png | bin 0 -> 6076 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/settings/settings_hover.png | bin 0 -> 6269 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/templates/templates_display.png | bin 0 -> 5651 bytes
-rw-r--r-- bitbake/lib/bb/ui/icons/templates/templates_hover.png | bin 0 -> 5791 bytes
-rw-r--r-- bitbake/lib/bb/ui/knotty.py | 550
-rw-r--r-- bitbake/lib/bb/ui/ncurses.py | 373
-rw-r--r-- bitbake/lib/bb/ui/puccho.py | 425
-rw-r--r-- bitbake/lib/bb/ui/toasterui.py | 292
-rw-r--r-- bitbake/lib/bb/ui/uievent.py | 133
-rw-r--r-- bitbake/lib/bb/ui/uihelper.py | 100
-rw-r--r-- bitbake/lib/bb/utils.py | 878
138 files changed, 39903 insertions, 0 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000000..6917ec378a
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,323 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# This is a copy-on-write dictionary and set which abuses classes to try to be nice and fast.
5#
6# Copyright (C) 2006 Tim Amsell
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21# Please note:
22# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
23# Assign a file to __warn__ to get warnings about slow operations.
24#
25
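# A minimal usage sketch of the behaviour described above (variable names here
# are illustrative only): copies are cheap class derivations, immutable values
# are shared, and touching a mutable value through a copy triggers the slow
# copy that __warn__ reports.
#
#   import sys
#   COWDictBase.__warn__ = sys.stderr      # report slow (copying) operations
#   base = COWDictBase.copy()              # level-1 COW dict
#   base['name'] = 'value'                 # immutable: shared, never copied
#   base['cfg'] = {}                       # mutable: stored under a __mutable__ key
#   child = base.copy()                    # O(1) "copy" via a derived class
#   child['cfg']['extra'] = 1              # forces a real copy of 'cfg' (SLOW, warned)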
26from __future__ import print_function
27import copy
28import types
29ImmutableTypes = (
30 types.NoneType,
31 bool,
32 complex,
33 float,
34 int,
35 long,
36 tuple,
37 frozenset,
38 basestring
39)
40
41MUTABLE = "__mutable__"
42
43class COWMeta(type):
44 pass
45
46class COWDictMeta(COWMeta):
47 __warn__ = False
48 __hasmutable__ = False
49 __marker__ = tuple()
50
51 def __str__(cls):
52 # FIXME: I have magic numbers!
53 return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
54 __repr__ = __str__
55
56 def cow(cls):
57 class C(cls):
58 __count__ = cls.__count__ + 1
59 return C
60 copy = cow
61 __call__ = cow
62
63 def __setitem__(cls, key, value):
64 if not isinstance(value, ImmutableTypes):
65 if not isinstance(value, COWMeta):
66 cls.__hasmutable__ = True
67 key += MUTABLE
68 setattr(cls, key, value)
69
70 def __getmutable__(cls, key, readonly=False):
71 nkey = key + MUTABLE
72 try:
73 return cls.__dict__[nkey]
74 except KeyError:
75 pass
76
77 value = getattr(cls, nkey)
78 if readonly:
79 return value
80
81 if not cls.__warn__ is False and not isinstance(value, COWMeta):
82 print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
83 try:
84 value = value.copy()
85 except AttributeError as e:
86 value = copy.copy(value)
87 setattr(cls, nkey, value)
88 return value
89
90 __getmarker__ = []
91 def __getreadonly__(cls, key, default=__getmarker__):
92 """\
93 Get a value (even if mutable) which you promise not to change.
94 """
95 return cls.__getitem__(key, default, True)
96
97 def __getitem__(cls, key, default=__getmarker__, readonly=False):
98 try:
99 try:
100 value = getattr(cls, key)
101 except AttributeError:
102 value = cls.__getmutable__(key, readonly)
103
104 # This is for values which have been deleted
105 if value is cls.__marker__:
106 raise AttributeError("key %s does not exist." % key)
107
108 return value
109 except AttributeError as e:
110 if not default is cls.__getmarker__:
111 return default
112
113 raise KeyError(str(e))
114
115 def __delitem__(cls, key):
116 cls.__setitem__(key, cls.__marker__)
117
118 def __revertitem__(cls, key):
119 if not cls.__dict__.has_key(key):
120 key += MUTABLE
121 delattr(cls, key)
122
123 def __contains__(cls, key):
124 return cls.has_key(key)
125
126 def has_key(cls, key):
127 value = cls.__getreadonly__(key, cls.__marker__)
128 if value is cls.__marker__:
129 return False
130 return True
131
132 def iter(cls, type, readonly=False):
133 for key in dir(cls):
134 if key.startswith("__"):
135 continue
136
137 if key.endswith(MUTABLE):
138 key = key[:-len(MUTABLE)]
139
140 if type == "keys":
141 yield key
142
143 try:
144 if readonly:
145 value = cls.__getreadonly__(key)
146 else:
147 value = cls[key]
148 except KeyError:
149 continue
150
151 if type == "values":
152 yield value
153 if type == "items":
154 yield (key, value)
155 raise StopIteration()
156
157 def iterkeys(cls):
158 return cls.iter("keys")
159 def itervalues(cls, readonly=False):
160 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
161 print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
162 return cls.iter("values", readonly)
163 def iteritems(cls, readonly=False):
164 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
165 print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
166 return cls.iter("items", readonly)
167
168class COWSetMeta(COWDictMeta):
169 def __str__(cls):
170 # FIXME: I have magic numbers!
171 return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
172 __repr__ = __str__
173
174 def cow(cls):
175 class C(cls):
176 __count__ = cls.__count__ + 1
177 return C
178
179 def add(cls, value):
180 COWDictMeta.__setitem__(cls, repr(hash(value)), value)
181
182 def remove(cls, value):
183 COWDictMeta.__delitem__(cls, repr(hash(value)))
184
185 def __in__(cls, value):
186 return COWDictMeta.has_key(cls, repr(hash(value)))
187
188 def iterkeys(cls):
189 raise TypeError("sets don't have keys")
190
191 def iteritems(cls):
192 raise TypeError("sets don't have 'items'")
193
194# These are the actual classes you use!
195class COWDictBase(object):
196 __metaclass__ = COWDictMeta
197 __count__ = 0
198
199class COWSetBase(object):
200 __metaclass__ = COWSetMeta
201 __count__ = 0
202
203if __name__ == "__main__":
204 import sys
205 COWDictBase.__warn__ = sys.stderr
206 a = COWDictBase()
207 print("a", a)
208
209 a['a'] = 'a'
210 a['b'] = 'b'
211 a['dict'] = {}
212
213 b = a.copy()
214 print("b", b)
215 b['c'] = 'b'
216
217 print()
218
219 print("a", a)
220 for x in a.iteritems():
221 print(x)
222 print("--")
223 print("b", b)
224 for x in b.iteritems():
225 print(x)
226 print()
227
228 b['dict']['a'] = 'b'
229 b['a'] = 'c'
230
231 print("a", a)
232 for x in a.iteritems():
233 print(x)
234 print("--")
235 print("b", b)
236 for x in b.iteritems():
237 print(x)
238 print()
239
240 try:
241 b['dict2']
242 except KeyError as e:
243 print("Okay!")
244
245 a['set'] = COWSetBase()
246 a['set'].add("o1")
247 a['set'].add("o1")
248 a['set'].add("o2")
249
250 print("a", a)
251 for x in a['set'].itervalues():
252 print(x)
253 print("--")
254 print("b", b)
255 for x in b['set'].itervalues():
256 print(x)
257 print()
258
259 b['set'].add('o3')
260
261 print("a", a)
262 for x in a['set'].itervalues():
263 print(x)
264 print("--")
265 print("b", b)
266 for x in b['set'].itervalues():
267 print(x)
268 print()
269
270 a['set2'] = set()
271 a['set2'].add("o1")
272 a['set2'].add("o1")
273 a['set2'].add("o2")
274
275 print("a", a)
276 for x in a.iteritems():
277 print(x)
278 print("--")
279 print("b", b)
280 for x in b.iteritems(readonly=True):
281 print(x)
282 print()
283
284 del b['b']
285 try:
286 print(b['b'])
287 except KeyError:
288 print("Yay! deleted key raises error")
289
290 if b.has_key('b'):
291 print("Boo!")
292 else:
293 print("Yay - has_key with delete works!")
294
295 print("a", a)
296 for x in a.iteritems():
297 print(x)
298 print("--")
299 print("b", b)
300 for x in b.iteritems(readonly=True):
301 print(x)
302 print()
303
304 b.__revertitem__('b')
305
306 print("a", a)
307 for x in a.iteritems():
308 print(x)
309 print("--")
310 print("b", b)
311 for x in b.iteritems(readonly=True):
312 print(x)
313 print()
314
315 b.__revertitem__('dict')
316 print("a", a)
317 for x in a.iteritems():
318 print(x)
319 print("--")
320 print("b", b)
321 for x in b.iteritems(readonly=True):
322 print(x)
323 print()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
new file mode 100644
index 0000000000..30a974582c
--- /dev/null
+++ b/bitbake/lib/bb/__init__.py
@@ -0,0 +1,143 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Build System Python Library
5#
6# Copyright (C) 2003 Holger Schurig
7# Copyright (C) 2003, 2004 Chris Larson
8#
9# Based on Gentoo's portage.py.
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24__version__ = "1.23.0"
25
26import sys
27if sys.version_info < (2, 7, 3):
28 raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
29
30
31class BBHandledException(Exception):
32 """
33 The big dilemma for generic bitbake code is what information to give the user
34 when an exception occurs. Any exception inheriting this base exception class
35 has already provided information to the user via some 'fired' message type such as
36 an explicitly fired event using bb.fire, or a bb.error message. If bitbake
37 encounters an exception derived from this class, no backtrace or other information
38# will be given to the user; it's assumed the earlier event provided the relevant information.
39 """
40 pass
41
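# Illustrative sketch of the intended pattern (the function and path below are
# hypothetical): report the problem through the normal message/event path
# first, then raise BBHandledException so no backtrace is shown to the user.
#
#   def load_required_file(path):
#       if not os.path.exists(path):
#           bb.error("Required file %s does not exist" % path)
#           raise BBHandledException()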
42import os
43import logging
44
45
46class NullHandler(logging.Handler):
47 def emit(self, record):
48 pass
49
50Logger = logging.getLoggerClass()
51class BBLogger(Logger):
52 def __init__(self, name):
53 if name.split(".")[0] == "BitBake":
54 self.debug = self.bbdebug
55 Logger.__init__(self, name)
56
57 def bbdebug(self, level, msg, *args, **kwargs):
58 return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
59
60 def plain(self, msg, *args, **kwargs):
61 return self.log(logging.INFO + 1, msg, *args, **kwargs)
62
63 def verbose(self, msg, *args, **kwargs):
64 return self.log(logging.INFO - 1, msg, *args, **kwargs)
65
66logging.raiseExceptions = False
67logging.setLoggerClass(BBLogger)
68
69logger = logging.getLogger("BitBake")
70logger.addHandler(NullHandler())
71logger.setLevel(logging.DEBUG - 2)
72
73# This has to be imported after the setLoggerClass, as the import of bb.msg
74# can result in construction of the various loggers.
75import bb.msg
76
77from bb import fetch2 as fetch
78sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
79
80# Messaging convenience functions
81def plain(*args):
82 logger.plain(''.join(args))
83
84def debug(lvl, *args):
85 if isinstance(lvl, basestring):
86 logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
87 args = (lvl,) + args
88 lvl = 1
89 logger.debug(lvl, ''.join(args))
90
91def note(*args):
92 logger.info(''.join(args))
93
94def warn(*args):
95 logger.warn(''.join(args))
96
97def error(*args):
98 logger.error(''.join(args))
99
100def fatal(*args):
101 logger.critical(''.join(args))
102 sys.exit(1)
103
104
105def deprecated(func, name=None, advice=""):
106 """This is a decorator which can be used to mark functions
107 as deprecated. It will result in a warning being emitted
108 when the function is used."""
109 import warnings
110
111 if advice:
112 advice = ": %s" % advice
113 if name is None:
114 name = func.__name__
115
116 def newFunc(*args, **kwargs):
117 warnings.warn("Call to deprecated function %s%s." % (name,
118 advice),
119 category=DeprecationWarning,
120 stacklevel=2)
121 return func(*args, **kwargs)
122 newFunc.__name__ = func.__name__
123 newFunc.__doc__ = func.__doc__
124 newFunc.__dict__.update(func.__dict__)
125 return newFunc
126
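# Illustrative sketch (the wrapped function is hypothetical): callers of the
# returned wrapper see a DeprecationWarning that points at the replacement.
#
#   def old_mkdir(path):
#       return bb.utils.mkdirhier(path)
#   old_mkdir = deprecated(old_mkdir, advice="Use bb.utils.mkdirhier instead")
#   old_mkdir("/tmp/example")
#   # -> DeprecationWarning: Call to deprecated function old_mkdir: Use bb.utils.mkdirhier instead.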
127# For compatibility
128def deprecate_import(current, modulename, fromlist, renames = None):
129 """Import objects from one module into another, wrapping them with a DeprecationWarning"""
130 import sys
131
132 module = __import__(modulename, fromlist = fromlist)
133 for position, objname in enumerate(fromlist):
134 obj = getattr(module, objname)
135 newobj = deprecated(obj, "{0}.{1}".format(current, objname),
136 "Please use {0}.{1} instead".format(modulename, objname))
137 if renames:
138 newname = renames[position]
139 else:
140 newname = objname
141
142 setattr(sys.modules[current], newname, newobj)
143
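# Illustrative sketch (module and symbol names are examples only): exposing
# bb.utils.mkdirhier under a legacy name on this module while warning callers.
#
#   deprecate_import(__name__, "bb.utils", ["mkdirhier"], ["old_mkdirhier"])
#   # bb.old_mkdirhier(...) still works, but emits
#   # "Call to deprecated function bb.mkdirhier: Please use bb.utils.mkdirhier instead."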
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
new file mode 100644
index 0000000000..5cb4c06a88
--- /dev/null
+++ b/bitbake/lib/bb/build.py
@@ -0,0 +1,709 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake 'Build' implementation
5#
6# Core code for function execution and task handling in the
7# BitBake build tools.
8#
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# Based on Gentoo's portage.py.
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import sys
30import logging
31import shlex
32import glob
33import time
34import bb
35import bb.msg
36import bb.process
37from contextlib import nested
38from bb import event, utils
39
40bblogger = logging.getLogger('BitBake')
41logger = logging.getLogger('BitBake.Build')
42
43NULL = open(os.devnull, 'r+')
44
45
46# When we execute a python function we'd like certain things
47# in all namespaces, hence we add them to __builtins__
48# If we do not do this and use the exec globals, they will
49# not be available to subfunctions.
50__builtins__['bb'] = bb
51__builtins__['os'] = os
52
53class FuncFailed(Exception):
54 def __init__(self, name = None, logfile = None):
55 self.logfile = logfile
56 self.name = name
57 if name:
58 self.msg = 'Function failed: %s' % name
59 else:
60 self.msg = "Function failed"
61
62 def __str__(self):
63 if self.logfile and os.path.exists(self.logfile):
64 msg = ("%s (log file is located at %s)" %
65 (self.msg, self.logfile))
66 else:
67 msg = self.msg
68 return msg
69
70class TaskBase(event.Event):
71 """Base class for task events"""
72
73 def __init__(self, t, logfile, d):
74 self._task = t
75 self._package = d.getVar("PF", True)
76 self.taskfile = d.getVar("FILE", True)
77 self.taskname = self._task
78 self.logfile = logfile
79 self.time = time.time()
80 event.Event.__init__(self)
81 self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
82
83 def getTask(self):
84 return self._task
85
86 def setTask(self, task):
87 self._task = task
88
89 def getDisplayName(self):
90 return bb.event.getName(self)[4:]
91
92 task = property(getTask, setTask, None, "task property")
93
94class TaskStarted(TaskBase):
95 """Task execution started"""
96 def __init__(self, t, logfile, taskflags, d):
97 super(TaskStarted, self).__init__(t, logfile, d)
98 self.taskflags = taskflags
99
100class TaskSucceeded(TaskBase):
101 """Task execution completed"""
102
103class TaskFailed(TaskBase):
104 """Task execution failed"""
105
106 def __init__(self, task, logfile, metadata, errprinted = False):
107 self.errprinted = errprinted
108 super(TaskFailed, self).__init__(task, logfile, metadata)
109
110class TaskFailedSilent(TaskBase):
111 """Task execution failed (silently)"""
112 def getDisplayName(self):
113 # Don't need to tell the user it was silent
114 return "Failed"
115
116class TaskInvalid(TaskBase):
117
118 def __init__(self, task, metadata):
119 super(TaskInvalid, self).__init__(task, None, metadata)
120 self._message = "No such task '%s'" % task
121
122
123class LogTee(object):
124 def __init__(self, logger, outfile):
125 self.outfile = outfile
126 self.logger = logger
127 self.name = self.outfile.name
128
129 def write(self, string):
130 self.logger.plain(string)
131 self.outfile.write(string)
132
133 def __enter__(self):
134 self.outfile.__enter__()
135 return self
136
137 def __exit__(self, *excinfo):
138 self.outfile.__exit__(*excinfo)
139
140 def __repr__(self):
141 return '<LogTee {0}>'.format(self.name)
142 def flush(self):
143 self.outfile.flush()
144
145def exec_func(func, d, dirs = None):
146 """Execute an BB 'function'"""
147
148 body = d.getVar(func)
149 if not body:
150 if body is None:
151 logger.warn("Function %s doesn't exist", func)
152 return
153
154 flags = d.getVarFlags(func)
155 cleandirs = flags.get('cleandirs')
156 if cleandirs:
157 for cdir in d.expand(cleandirs).split():
158 bb.utils.remove(cdir, True)
159 bb.utils.mkdirhier(cdir)
160
161 if dirs is None:
162 dirs = flags.get('dirs')
163 if dirs:
164 dirs = d.expand(dirs).split()
165
166 if dirs:
167 for adir in dirs:
168 bb.utils.mkdirhier(adir)
169 adir = dirs[-1]
170 else:
171 adir = d.getVar('B', True)
172 bb.utils.mkdirhier(adir)
173
174 ispython = flags.get('python')
175
176 lockflag = flags.get('lockfiles')
177 if lockflag:
178 lockfiles = [f for f in d.expand(lockflag).split()]
179 else:
180 lockfiles = None
181
182 tempdir = d.getVar('T', True)
183
184 # The 'or func' fallback allows items to be executed outside of the normal
185 # task set, such as buildhistory
186 task = d.getVar('BB_RUNTASK', True) or func
187 if task == func:
188 taskfunc = task
189 else:
190 taskfunc = "%s.%s" % (task, func)
191
192 runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
193 runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
194 runfile = os.path.join(tempdir, runfn)
195 bb.utils.mkdirhier(os.path.dirname(runfile))
196
197 # Set up the courtesy link to the runfn, only for tasks.
198 # We create the link 'just' before the run script is created;
199 # if we created it afterwards and the run script failed, the
200 # link would never appear because an exception would already have been raised.
201 if task == func:
202 runlink = os.path.join(tempdir, 'run.{0}'.format(task))
203 if runlink:
204 bb.utils.remove(runlink)
205
206 try:
207 os.symlink(runfn, runlink)
208 except OSError:
209 pass
210
211 with bb.utils.fileslocked(lockfiles):
212 if ispython:
213 exec_func_python(func, d, runfile, cwd=adir)
214 else:
215 exec_func_shell(func, d, runfile, cwd=adir)
216
217_functionfmt = """
218def {function}(d):
219{body}
220
221{function}(d)
222"""
223logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
224def exec_func_python(func, d, runfile, cwd=None):
225 """Execute a python BB 'function'"""
226
227 bbfile = d.getVar('FILE', True)
228 code = _functionfmt.format(function=func, body=d.getVar(func, True))
229 bb.utils.mkdirhier(os.path.dirname(runfile))
230 with open(runfile, 'w') as script:
231 script.write(code)
232
233 if cwd:
234 try:
235 olddir = os.getcwd()
236 except OSError:
237 olddir = None
238 os.chdir(cwd)
239
240 bb.debug(2, "Executing python function %s" % func)
241
242 try:
243 comp = utils.better_compile(code, func, bbfile)
244 utils.better_exec(comp, {"d": d}, code, bbfile)
245 except:
246 if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
247 raise
248
249 raise FuncFailed(func, None)
250 finally:
251 bb.debug(2, "Python function %s finished" % func)
252
253 if cwd and olddir:
254 try:
255 os.chdir(olddir)
256 except OSError:
257 pass
258
259def shell_trap_code():
260 return '''#!/bin/sh\n
261# Emit a useful diagnostic if something fails:
262bb_exit_handler() {
263 ret=$?
264 case $ret in
265 0) ;;
266 *) case $BASH_VERSION in
267 "") echo "WARNING: exit code $ret from a shell command.";;
268 *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
269 \"$BASH_COMMAND\"";;
270 esac
271 exit $ret
272 esac
273}
274trap 'bb_exit_handler' 0
275set -e
276'''
277
278def exec_func_shell(func, d, runfile, cwd=None):
279 """Execute a shell function from the metadata
280
281 Note on directory behavior. The 'dirs' varflag should contain a list
282 of the directories you need created prior to execution. The last
283 item in the list is where we will chdir/cd to.
284 """
285
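    # Illustrative recipe-side sketch of the 'dirs' behaviour documented above
    # (metadata syntax; the task name is an example): each listed directory is
    # created and the last one becomes the cwd of the generated run script.
    #
    #   do_compile[dirs] = "${B}/logs ${B}"
    #   # -> both directories are created; the script then does: cd '${B}'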
286 # Don't let the emitted shell script override PWD
287 d.delVarFlag('PWD', 'export')
288
289 with open(runfile, 'w') as script:
290 script.write(shell_trap_code())
291
292 bb.data.emit_func(func, script, d)
293
294 if bb.msg.loggerVerboseLogs:
295 script.write("set -x\n")
296 if cwd:
297 script.write("cd '%s'\n" % cwd)
298 script.write("%s\n" % func)
299 script.write('''
300# cleanup
301ret=$?
302trap '' 0
303exit $?
304''')
305
306 os.chmod(runfile, 0775)
307
308 cmd = runfile
309 if d.getVarFlag(func, 'fakeroot'):
310 fakerootcmd = d.getVar('FAKEROOT', True)
311 if fakerootcmd:
312 cmd = [fakerootcmd, runfile]
313
314 if bb.msg.loggerDefaultVerbose:
315 logfile = LogTee(logger, sys.stdout)
316 else:
317 logfile = sys.stdout
318
319 bb.debug(2, "Executing shell function %s" % func)
320
321 try:
322 with open(os.devnull, 'r+') as stdin:
323 bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
324 except bb.process.CmdError:
325 logfn = d.getVar('BB_LOGFILE', True)
326 raise FuncFailed(func, logfn)
327
328 bb.debug(2, "Shell function %s finished" % func)
329
330def _task_data(fn, task, d):
331 localdata = bb.data.createCopy(d)
332 localdata.setVar('BB_FILENAME', fn)
333 localdata.setVar('BB_CURRENTTASK', task[3:])
334 localdata.setVar('OVERRIDES', 'task-%s:%s' %
335 (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
336 localdata.finalize()
337 bb.data.expandKeys(localdata)
338 return localdata
339
340def _exec_task(fn, task, d, quieterr):
341 """Execute a BB 'task'
342
343 Execution of a task involves a bit more setup than executing a function,
344 running it with its own local metadata, and with some useful variables set.
345 """
346 if not d.getVarFlag(task, 'task'):
347 event.fire(TaskInvalid(task, d), d)
348 logger.error("No such task: %s" % task)
349 return 1
350
351 logger.debug(1, "Executing task %s", task)
352
353 localdata = _task_data(fn, task, d)
354 tempdir = localdata.getVar('T', True)
355 if not tempdir:
356 bb.fatal("T variable not set, unable to build")
357
358 # Change nice level if we're asked to
359 nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
360 if nice:
361 curnice = os.nice(0)
362 nice = int(nice) - curnice
363 newnice = os.nice(nice)
364 logger.debug(1, "Renice to %s " % newnice)
365
366 bb.utils.mkdirhier(tempdir)
367
368 # Determine the logfile to generate
369 logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
370 logbase = logfmt.format(task=task, pid=os.getpid())
371
372 # Document the order of the tasks...
373 logorder = os.path.join(tempdir, 'log.task_order')
374 try:
375 with open(logorder, 'a') as logorderfile:
376 logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
377 except OSError:
378 logger.exception("Opening log file '%s'", logorder)
379 pass
380
381 # Setup the courtesy link to the logfn
382 loglink = os.path.join(tempdir, 'log.{0}'.format(task))
383 logfn = os.path.join(tempdir, logbase)
384 if loglink:
385 bb.utils.remove(loglink)
386
387 try:
388 os.symlink(logbase, loglink)
389 except OSError:
390 pass
391
392 prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
393 postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
394
395 class ErrorCheckHandler(logging.Handler):
396 def __init__(self):
397 self.triggered = False
398 logging.Handler.__init__(self, logging.ERROR)
399 def emit(self, record):
400 self.triggered = True
401
402 # Handle logfiles
403 si = open('/dev/null', 'r')
404 try:
405 bb.utils.mkdirhier(os.path.dirname(logfn))
406 logfile = open(logfn, 'w')
407 except OSError:
408 logger.exception("Opening log file '%s'", logfn)
409 pass
410
411 # Dup the existing fds so we don't lose them
412 osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
413 oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
414 ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
415
416 # Replace those fds with our own
417 os.dup2(si.fileno(), osi[1])
418 os.dup2(logfile.fileno(), oso[1])
419 os.dup2(logfile.fileno(), ose[1])
420
421 # Ensure python logging goes to the logfile
422 handler = logging.StreamHandler(logfile)
423 handler.setFormatter(logformatter)
424 # Always enable full debug output into task logfiles
425 handler.setLevel(logging.DEBUG - 2)
426 bblogger.addHandler(handler)
427
428 errchk = ErrorCheckHandler()
429 bblogger.addHandler(errchk)
430
431 localdata.setVar('BB_LOGFILE', logfn)
432 localdata.setVar('BB_RUNTASK', task)
433
434 flags = localdata.getVarFlags(task)
435
436 event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
437 try:
438 for func in (prefuncs or '').split():
439 exec_func(func, localdata)
440 exec_func(task, localdata)
441 for func in (postfuncs or '').split():
442 exec_func(func, localdata)
443 except FuncFailed as exc:
444 if quieterr:
445 event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
446 else:
447 errprinted = errchk.triggered
448 logger.error(str(exc))
449 event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
450 return 1
451 finally:
452 sys.stdout.flush()
453 sys.stderr.flush()
454
455 bblogger.removeHandler(handler)
456
457 # Restore the backup fds
458 os.dup2(osi[0], osi[1])
459 os.dup2(oso[0], oso[1])
460 os.dup2(ose[0], ose[1])
461
462 # Close the backup fds
463 os.close(osi[0])
464 os.close(oso[0])
465 os.close(ose[0])
466 si.close()
467
468 logfile.close()
469 if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
470 logger.debug(2, "Zero size logfn %s, removing", logfn)
471 bb.utils.remove(logfn)
472 bb.utils.remove(loglink)
473 event.fire(TaskSucceeded(task, logfn, localdata), localdata)
474
475 if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
476 make_stamp(task, localdata)
477
478 return 0
479
480def exec_task(fn, task, d, profile = False):
481 try:
482 quieterr = False
483 if d.getVarFlag(task, "quieterrors") is not None:
484 quieterr = True
485
486 if profile:
487 profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
488 try:
489 import cProfile as profile
490 except:
491 import profile
492 prof = profile.Profile()
493 ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
494 prof.dump_stats(profname)
495 bb.utils.process_profilelog(profname)
496
497 return ret
498 else:
499 return _exec_task(fn, task, d, quieterr)
500
501 except Exception:
502 from traceback import format_exc
503 if not quieterr:
504 logger.error("Build of %s failed" % (task))
505 logger.error(format_exc())
506 failedevent = TaskFailed(task, None, d, True)
507 event.fire(failedevent, d)
508 return 1
509
510def stamp_internal(taskname, d, file_name):
511 """
512 Internal stamp helper function
513 Makes sure the stamp directory exists
514 Returns the stamp path+filename
515
516 In the bitbake core, d can be a CacheData and file_name will be set.
517 When called in task context, d will be a data store, file_name will not be set
518 """
519 taskflagname = taskname
520 if taskname.endswith("_setscene") and taskname != "do_setscene":
521 taskflagname = taskname.replace("_setscene", "")
522
523 if file_name:
524 stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
525 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
526 else:
527 stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
528 file_name = d.getVar('BB_FILENAME', True)
529 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
530
531 if not stamp:
532 return
533
534 stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
535
536 stampdir = os.path.dirname(stamp)
537 if bb.parse.cached_mtime_noerror(stampdir) == 0:
538 bb.utils.mkdirhier(stampdir)
539
540 return stamp
541
542def stamp_cleanmask_internal(taskname, d, file_name):
543 """
544 Internal stamp helper function to generate stamp cleaning mask
545 Returns the stamp path+filename
546
547 In the bitbake core, d can be a CacheData and file_name will be set.
548 When called in task context, d will be a data store, file_name will not be set
549 """
550 taskflagname = taskname
551 if taskname.endswith("_setscene") and taskname != "do_setscene":
552 taskflagname = taskname.replace("_setscene", "")
553
554 if file_name:
555 stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
556 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
557 else:
558 stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
559 file_name = d.getVar('BB_FILENAME', True)
560 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
561
562 if not stamp:
563 return []
564
565 cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
566
567 return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
568
569def make_stamp(task, d, file_name = None):
570 """
571 Creates/updates a stamp for a given task
572 (d can be a data dict or dataCache)
573 """
574 cleanmask = stamp_cleanmask_internal(task, d, file_name)
575 for mask in cleanmask:
576 for name in glob.glob(mask):
577 # Preserve sigdata files in the stamps directory
578 if "sigdata" in name:
579 continue
580 # Preserve taint files in the stamps directory
581 if name.endswith('.taint'):
582 continue
583 os.unlink(name)
584
585 stamp = stamp_internal(task, d, file_name)
586 # Remove the file and recreate to force timestamp
587 # change on broken NFS filesystems
588 if stamp:
589 bb.utils.remove(stamp)
590 open(stamp, "w").close()
591
592 # If we're in task context, write out a signature file for each task
593 # as it completes
594 if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
595 file_name = d.getVar('BB_FILENAME', True)
596 bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)
597
598def del_stamp(task, d, file_name = None):
599 """
600 Removes a stamp for a given task
601 (d can be a data dict or dataCache)
602 """
603 stamp = stamp_internal(task, d, file_name)
604 bb.utils.remove(stamp)
605
606def write_taint(task, d, file_name = None):
607 """
608 Creates a "taint" file which will force the specified task and its
609 dependents to be re-run the next time by influencing the value of its
610 taskhash.
611 (d can be a data dict or dataCache)
612 """
613 import uuid
614 if file_name:
615 taintfn = d.stamp[file_name] + '.' + task + '.taint'
616 else:
617 taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
618 bb.utils.mkdirhier(os.path.dirname(taintfn))
619 # The specific content of the taint file is not really important,
620 # we just need it to be random, so a random UUID is used
621 with open(taintfn, 'w') as taintf:
622 taintf.write(str(uuid.uuid4()))
623
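# Illustrative sketch (the task name is an example): forcing do_compile to
# re-run on the next build by tainting its taskhash.
#
#   bb.build.write_taint('do_compile', d)
#   # creates e.g. ${STAMP}.do_compile.taint containing a random UUID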
624def stampfile(taskname, d, file_name = None):
625 """
626 Return the stamp for a given task
627 (d can be a data dict or dataCache)
628 """
629 return stamp_internal(taskname, d, file_name)
630
631def add_tasks(tasklist, deltasklist, d):
632 task_deps = d.getVar('_task_deps')
633 if not task_deps:
634 task_deps = {}
635 if not 'tasks' in task_deps:
636 task_deps['tasks'] = []
637 if not 'parents' in task_deps:
638 task_deps['parents'] = {}
639
640 for task in tasklist:
641 task = d.expand(task)
642
643 if task in deltasklist:
644 continue
645
646 d.setVarFlag(task, 'task', 1)
647
648 if not task in task_deps['tasks']:
649 task_deps['tasks'].append(task)
650
651 flags = d.getVarFlags(task)
652 def getTask(name):
653 if not name in task_deps:
654 task_deps[name] = {}
655 if name in flags:
656 deptask = d.expand(flags[name])
657 task_deps[name][task] = deptask
658 getTask('depends')
659 getTask('rdepends')
660 getTask('deptask')
661 getTask('rdeptask')
662 getTask('recrdeptask')
663 getTask('recideptask')
664 getTask('nostamp')
665 getTask('fakeroot')
666 getTask('noexec')
667 getTask('umask')
668 task_deps['parents'][task] = []
669 if 'deps' in flags:
670 for dep in flags['deps']:
671 dep = d.expand(dep)
672 task_deps['parents'][task].append(dep)
673
674 # re-store the dict; don't assume the datastore is still holding a reference to it
675 d.setVar('_task_deps', task_deps)
676
677def addtask(task, before, after, d):
678 if task[:3] != "do_":
679 task = "do_" + task
680
681 d.setVarFlag(task, "task", 1)
682 bbtasks = d.getVar('__BBTASKS') or []
683 if not task in bbtasks:
684 bbtasks.append(task)
685 d.setVar('__BBTASKS', bbtasks)
686
687 existing = d.getVarFlag(task, "deps") or []
688 if after is not None:
689 # set up deps for function
690 for entry in after.split():
691 if entry not in existing:
692 existing.append(entry)
693 d.setVarFlag(task, "deps", existing)
694 if before is not None:
695 # set up things that depend on this func
696 for entry in before.split():
697 existing = d.getVarFlag(entry, "deps") or []
698 if task not in existing:
699 d.setVarFlag(entry, "deps", [task] + existing)
700
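# Illustrative sketch (task names are examples): registering a new task and
# wiring its ordering constraints; bare names are normalised to the do_ prefix.
#
#   addtask('deploy', 'do_build', 'do_install', d)   # before=do_build, after=do_install
#   # -> d.getVarFlag('do_deploy', 'deps') now includes 'do_install'
#   # -> d.getVarFlag('do_build', 'deps')  now starts with 'do_deploy'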
701def deltask(task, d):
702 if task[:3] != "do_":
703 task = "do_" + task
704
705 bbtasks = d.getVar('__BBDELTASKS') or []
706 if not task in bbtasks:
707 bbtasks.append(task)
708 d.setVar('__BBDELTASKS', bbtasks)
709
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000000..431fc079e4
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,847 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Cache implementation
5#
6# Caching of bitbake variables before task execution
7
8# Copyright (C) 2006 Richard Purdie
9# Copyright (C) 2012 Intel Corporation
10
11# but small sections based on code from bin/bitbake:
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
15# Copyright (C) 2005 Holger Hans Peter Freyther
16# Copyright (C) 2005 ROAD GmbH
17#
18# This program is free software; you can redistribute it and/or modify
19# it under the terms of the GNU General Public License version 2 as
20# published by the Free Software Foundation.
21#
22# This program is distributed in the hope that it will be useful,
23# but WITHOUT ANY WARRANTY; without even the implied warranty of
24# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25# GNU General Public License for more details.
26#
27# You should have received a copy of the GNU General Public License along
28# with this program; if not, write to the Free Software Foundation, Inc.,
29# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
30
31
32import os
33import logging
34from collections import defaultdict
35import bb.utils
36
37logger = logging.getLogger("BitBake.Cache")
38
39try:
40 import cPickle as pickle
41except ImportError:
42 import pickle
43 logger.info("Importing cPickle failed. "
44 "Falling back to a very slow implementation.")
45
46__cache_version__ = "147"
47
48def getCacheFile(path, filename, data_hash):
49 return os.path.join(path, filename + "." + data_hash)
50
51# RecipeInfoCommon defines common data-retrieval methods
52# from metadata for caches. CoreRecipeInfo, as well as any other
53# extra RecipeInfo class, needs to inherit from this class
54class RecipeInfoCommon(object):
55
56 @classmethod
57 def listvar(cls, var, metadata):
58 return cls.getvar(var, metadata).split()
59
60 @classmethod
61 def intvar(cls, var, metadata):
62 return int(cls.getvar(var, metadata) or 0)
63
64 @classmethod
65 def depvar(cls, var, metadata):
66 return bb.utils.explode_deps(cls.getvar(var, metadata))
67
68 @classmethod
69 def pkgvar(cls, var, packages, metadata):
70 return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
71 for pkg in packages)
72
73 @classmethod
74 def taskvar(cls, var, tasks, metadata):
75 return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
76 for task in tasks)
77
78 @classmethod
79 def flaglist(cls, flag, varlist, metadata, squash=False):
80 out_dict = dict((var, metadata.getVarFlag(var, flag, True))
81 for var in varlist)
82 if squash:
83 return dict((k,v) for (k,v) in out_dict.iteritems() if v)
84 else:
85 return out_dict
86
87 @classmethod
88 def getvar(cls, var, metadata):
89 return metadata.getVar(var, True) or ''
90
91
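# Illustrative sketch (variable names are examples) of how the helpers above
# are typically used when building a RecipeInfo from parsed metadata:
#
#   pkgs  = RecipeInfoCommon.listvar('PACKAGES', metadata)   # whitespace-split list
#   deps  = RecipeInfoCommon.depvar('DEPENDS', metadata)     # exploded dependency names
#   flags = RecipeInfoCommon.flaglist('stamp-extra-info', tasks, metadata)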
92class CoreRecipeInfo(RecipeInfoCommon):
93 __slots__ = ()
94
95 cachefile = "bb_cache.dat"
96
97 def __init__(self, filename, metadata):
98 self.file_depends = metadata.getVar('__depends', False)
99 self.timestamp = bb.parse.cached_mtime(filename)
100 self.variants = self.listvar('__VARIANTS', metadata) + ['']
101 self.appends = self.listvar('__BBAPPEND', metadata)
102 self.nocache = self.getvar('__BB_DONT_CACHE', metadata)
103
104 self.skipreason = self.getvar('__SKIPPED', metadata)
105 if self.skipreason:
106 self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
107 self.skipped = True
108 self.provides = self.depvar('PROVIDES', metadata)
109 self.rprovides = self.depvar('RPROVIDES', metadata)
110 return
111
112 self.tasks = metadata.getVar('__BBTASKS', False)
113
114 self.pn = self.getvar('PN', metadata)
115 self.packages = self.listvar('PACKAGES', metadata)
116 if not self.pn in self.packages:
117 self.packages.append(self.pn)
118
119 self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
120 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
121
122 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
123
124 self.skipped = False
125 self.pe = self.getvar('PE', metadata)
126 self.pv = self.getvar('PV', metadata)
127 self.pr = self.getvar('PR', metadata)
128 self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
129 self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
130 self.stamp = self.getvar('STAMP', metadata)
131 self.stampclean = self.getvar('STAMPCLEAN', metadata)
132 self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
133 self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
134 self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
135 self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
136 self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
137 self.depends = self.depvar('DEPENDS', metadata)
138 self.provides = self.depvar('PROVIDES', metadata)
139 self.rdepends = self.depvar('RDEPENDS', metadata)
140 self.rprovides = self.depvar('RPROVIDES', metadata)
141 self.rrecommends = self.depvar('RRECOMMENDS', metadata)
142 self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
143 self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
144 self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
145 self.inherits = self.getvar('__inherit_cache', metadata)
146 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
147 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
148 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
149
150 @classmethod
151 def init_cacheData(cls, cachedata):
152 # CacheData in Core RecipeInfo Class
153 cachedata.task_deps = {}
154 cachedata.pkg_fn = {}
155 cachedata.pkg_pn = defaultdict(list)
156 cachedata.pkg_pepvpr = {}
157 cachedata.pkg_dp = {}
158
159 cachedata.stamp = {}
160 cachedata.stampclean = {}
161 cachedata.stamp_base = {}
162 cachedata.stamp_base_clean = {}
163 cachedata.stamp_extrainfo = {}
164 cachedata.file_checksums = {}
165 cachedata.fn_provides = {}
166 cachedata.pn_provides = defaultdict(list)
167 cachedata.all_depends = []
168
169 cachedata.deps = defaultdict(list)
170 cachedata.packages = defaultdict(list)
171 cachedata.providers = defaultdict(list)
172 cachedata.rproviders = defaultdict(list)
173 cachedata.packages_dynamic = defaultdict(list)
174
175 cachedata.rundeps = defaultdict(lambda: defaultdict(list))
176 cachedata.runrecs = defaultdict(lambda: defaultdict(list))
177 cachedata.possible_world = []
178 cachedata.universe_target = []
179 cachedata.hashfn = {}
180
181 cachedata.basetaskhash = {}
182 cachedata.inherits = {}
183 cachedata.fakerootenv = {}
184 cachedata.fakerootnoenv = {}
185 cachedata.fakerootdirs = {}
186
187 def add_cacheData(self, cachedata, fn):
188 cachedata.task_deps[fn] = self.task_deps
189 cachedata.pkg_fn[fn] = self.pn
190 cachedata.pkg_pn[self.pn].append(fn)
191 cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
192 cachedata.pkg_dp[fn] = self.defaultpref
193 cachedata.stamp[fn] = self.stamp
194 cachedata.stampclean[fn] = self.stampclean
195 cachedata.stamp_base[fn] = self.stamp_base
196 cachedata.stamp_base_clean[fn] = self.stamp_base_clean
197 cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
198 cachedata.file_checksums[fn] = self.file_checksums
199
200 provides = [self.pn]
201 for provide in self.provides:
202 if provide not in provides:
203 provides.append(provide)
204 cachedata.fn_provides[fn] = provides
205
206 for provide in provides:
207 cachedata.providers[provide].append(fn)
208 if provide not in cachedata.pn_provides[self.pn]:
209 cachedata.pn_provides[self.pn].append(provide)
210
211 for dep in self.depends:
212 if dep not in cachedata.deps[fn]:
213 cachedata.deps[fn].append(dep)
214 if dep not in cachedata.all_depends:
215 cachedata.all_depends.append(dep)
216
217 rprovides = self.rprovides
218 for package in self.packages:
219 cachedata.packages[package].append(fn)
220 rprovides += self.rprovides_pkg[package]
221
222 for rprovide in rprovides:
223 cachedata.rproviders[rprovide].append(fn)
224
225 for package in self.packages_dynamic:
226 cachedata.packages_dynamic[package].append(fn)
227
228 # Build hash of runtime depends and recommends
229 for package in self.packages + [self.pn]:
230 cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
231 cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
232
233 # Collect files we may need for possible world-dep
234 # calculations
235 if not self.not_world:
236 cachedata.possible_world.append(fn)
237
238 # create a collection of all targets for sanity checking
239 # tasks, such as upstream versions, license, and tools for
240 # task and image creation.
241 cachedata.universe_target.append(self.pn)
242
243 cachedata.hashfn[fn] = self.hashfilename
244 for task, taskhash in self.basetaskhashes.iteritems():
245 identifier = '%s.%s' % (fn, task)
246 cachedata.basetaskhash[identifier] = taskhash
247
248 cachedata.inherits[fn] = self.inherits
249 cachedata.fakerootenv[fn] = self.fakerootenv
250 cachedata.fakerootnoenv[fn] = self.fakerootnoenv
251 cachedata.fakerootdirs[fn] = self.fakerootdirs
252
253
254
255class Cache(object):
256 """
257 BitBake Cache implementation
258 """
259
260 def __init__(self, data, data_hash, caches_array):
261 # Pass caches_array information into the Cache constructor;
262 # it will be used later for deciding whether we
263 # need extra cache file dump/load support
264 self.caches_array = caches_array
265 self.cachedir = data.getVar("CACHE", True)
266 self.clean = set()
267 self.checked = set()
268 self.depends_cache = {}
269 self.data = None
270 self.data_fn = None
271 self.cacheclean = True
272 self.data_hash = data_hash
273
274 if self.cachedir in [None, '']:
275 self.has_cache = False
276 logger.info("Not using a cache. "
277 "Set CACHE = <directory> to enable.")
278 return
279
280 self.has_cache = True
281 self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
282
283 logger.debug(1, "Using cache in '%s'", self.cachedir)
284 bb.utils.mkdirhier(self.cachedir)
285
286 cache_ok = True
287 if self.caches_array:
288 for cache_class in self.caches_array:
289 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
290 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
291 cache_ok = cache_ok and os.path.exists(cachefile)
292 cache_class.init_cacheData(self)
293 if cache_ok:
294 self.load_cachefile()
295 elif os.path.isfile(self.cachefile):
296 logger.info("Out of date cache found, rebuilding...")
297
298 def load_cachefile(self):
299 # First, use the core cache file information for
300 # validity checking
301 with open(self.cachefile, "rb") as cachefile:
302 pickled = pickle.Unpickler(cachefile)
303 try:
304 cache_ver = pickled.load()
305 bitbake_ver = pickled.load()
306 except Exception:
307 logger.info('Invalid cache, rebuilding...')
308 return
309
310 if cache_ver != __cache_version__:
311 logger.info('Cache version mismatch, rebuilding...')
312 return
313 elif bitbake_ver != bb.__version__:
314 logger.info('Bitbake version mismatch, rebuilding...')
315 return
316
317
318 cachesize = 0
319 previous_progress = 0
320 previous_percent = 0
321
322 # Calculate the correct cachesize of all those cache files
323 for cache_class in self.caches_array:
324 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
325 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
326 with open(cachefile, "rb") as cachefile:
327 cachesize += os.fstat(cachefile.fileno()).st_size
328
329 bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
330
331 for cache_class in self.caches_array:
332 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
333 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
334 with open(cachefile, "rb") as cachefile:
335 pickled = pickle.Unpickler(cachefile)
336 while cachefile:
337 try:
338 key = pickled.load()
339 value = pickled.load()
340 except Exception:
341 break
342 if self.depends_cache.has_key(key):
343 self.depends_cache[key].append(value)
344 else:
345 self.depends_cache[key] = [value]
346 # only fire events on even percentage boundaries
347 current_progress = cachefile.tell() + previous_progress
348 current_percent = 100 * current_progress / cachesize
349 if current_percent > previous_percent:
350 previous_percent = current_percent
351 bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
352 self.data)
353
354 previous_progress += current_progress
355
356 # Note: the depends cache count corresponds to the number of parsed files;
357 # a file may have several caches but is still regarded as one item in the cache
358 bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
359 len(self.depends_cache)),
360 self.data)
361
362
363 @staticmethod
364 def virtualfn2realfn(virtualfn):
365 """
366 Convert a virtual file name to a real one + the associated subclass keyword
367 """
368
369 fn = virtualfn
370 cls = ""
371 if virtualfn.startswith('virtual:'):
372 elems = virtualfn.split(':')
373 cls = ":".join(elems[1:-1])
374 fn = elems[-1]
375 return (fn, cls)
376
377 @staticmethod
378 def realfn2virtual(realfn, cls):
379 """
380 Convert a real filename + the associated subclass keyword to a virtual filename
381 """
382 if cls == "":
383 return realfn
384 return "virtual:" + cls + ":" + realfn
385
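    # Illustrative mapping for the two helpers above (the 'native' class
    # extension is just an example):
    #
    #   virtualfn2realfn("virtual:native:/path/to/foo.bb") -> ("/path/to/foo.bb", "native")
    #   virtualfn2realfn("/path/to/foo.bb")                -> ("/path/to/foo.bb", "")
    #   realfn2virtual("/path/to/foo.bb", "native")        -> "virtual:native:/path/to/foo.bb"
    #   realfn2virtual("/path/to/foo.bb", "")              -> "/path/to/foo.bb"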
386 @classmethod
387 def loadDataFull(cls, virtualfn, appends, cfgData):
388 """
389 Return a complete set of data for fn.
390 To do this, we need to parse the file.
391 """
392
393 (fn, virtual) = cls.virtualfn2realfn(virtualfn)
394
395 logger.debug(1, "Parsing %s (full)", fn)
396
397 cfgData.setVar("__ONLYFINALISE", virtual or "default")
398 bb_data = cls.load_bbfile(fn, appends, cfgData)
399 return bb_data[virtual]
400
401 @classmethod
402 def parse(cls, filename, appends, configdata, caches_array):
403 """Parse the specified filename, returning the recipe information"""
404 infos = []
405 datastores = cls.load_bbfile(filename, appends, configdata)
406 depends = []
407 for variant, data in sorted(datastores.iteritems(),
408 key=lambda i: i[0],
409 reverse=True):
410 virtualfn = cls.realfn2virtual(filename, variant)
411 depends = depends + (data.getVar("__depends", False) or [])
412 if depends and not variant:
413 data.setVar("__depends", depends)
414
415 info_array = []
416 for cache_class in caches_array:
417 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
418 info = cache_class(filename, data)
419 info_array.append(info)
420 infos.append((virtualfn, info_array))
421
422 return infos
423
424 def load(self, filename, appends, configdata):
425 """Obtain the recipe information for the specified filename,
426 using cached values if available, otherwise parsing.
427
428 Note that if it does parse to obtain the info, it will not
429 automatically add the information to the cache or to your
430 CacheData. Use the add or add_info method to do so after
431 running this, or use loadData instead."""
432 cached = self.cacheValid(filename, appends)
433 if cached:
434 infos = []
435 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
436 info_array = self.depends_cache[filename]
437 for variant in info_array[0].variants:
438 virtualfn = self.realfn2virtual(filename, variant)
439 infos.append((virtualfn, self.depends_cache[virtualfn]))
440 else:
441 logger.debug(1, "Parsing %s", filename)
442 return self.parse(filename, appends, configdata, self.caches_array)
443
444 return cached, infos
445
446 def loadData(self, fn, appends, cfgData, cacheData):
447 """Load the recipe info for the specified filename,
448 parsing and adding to the cache if necessary, and adding
449 the recipe information to the supplied CacheData instance."""
450 skipped, virtuals = 0, 0
451
452 cached, infos = self.load(fn, appends, cfgData)
453 for virtualfn, info_array in infos:
454 if info_array[0].skipped:
455 logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
456 skipped += 1
457 else:
458 self.add_info(virtualfn, info_array, cacheData, not cached)
459 virtuals += 1
460
461 return cached, skipped, virtuals
462
463 def cacheValid(self, fn, appends):
464 """
465 Is the cache valid for fn?
466 Fast version, no timestamps checked.
467 """
468 if fn not in self.checked:
469 self.cacheValidUpdate(fn, appends)
470
471 # Is cache enabled?
472 if not self.has_cache:
473 return False
474 if fn in self.clean:
475 return True
476 return False
477
478 def cacheValidUpdate(self, fn, appends):
479 """
480 Is the cache valid for fn?
481 Make thorough (slower) checks including timestamps.
482 """
483 # Is cache enabled?
484 if not self.has_cache:
485 return False
486
487 self.checked.add(fn)
488
489 # File isn't in depends_cache
490 if not fn in self.depends_cache:
491 logger.debug(2, "Cache: %s is not cached", fn)
492 return False
493
494 mtime = bb.parse.cached_mtime_noerror(fn)
495
496 # Check file still exists
497 if mtime == 0:
498 logger.debug(2, "Cache: %s no longer exists", fn)
499 self.remove(fn)
500 return False
501
502 info_array = self.depends_cache[fn]
503 # Check the file's timestamp
504 if mtime != info_array[0].timestamp:
505 logger.debug(2, "Cache: %s changed", fn)
506 self.remove(fn)
507 return False
508
509 # Check dependencies are still valid
510 depends = info_array[0].file_depends
511 if depends:
512 for f, old_mtime in depends:
513 fmtime = bb.parse.cached_mtime_noerror(f)
514 # Check if file still exists
515 if old_mtime != 0 and fmtime == 0:
516 logger.debug(2, "Cache: %s's dependency %s was removed",
517 fn, f)
518 self.remove(fn)
519 return False
520
521 if (fmtime != old_mtime):
522 logger.debug(2, "Cache: %s's dependency %s changed",
523 fn, f)
524 self.remove(fn)
525 return False
526
527 if hasattr(info_array[0], 'file_checksums'):
528 for _, fl in info_array[0].file_checksums.items():
529 for f in fl.split():
530 if not ('*' in f or os.path.exists(f)):
531 logger.debug(2, "Cache: %s's file checksum list file %s was removed",
532 fn, f)
533 self.remove(fn)
534 return False
535
536 if appends != info_array[0].appends:
537 logger.debug(2, "Cache: appends for %s changed", fn)
538 logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
539 self.remove(fn)
540 return False
541
542 invalid = False
543 for cls in info_array[0].variants:
544 virtualfn = self.realfn2virtual(fn, cls)
545 self.clean.add(virtualfn)
546 if virtualfn not in self.depends_cache:
547 logger.debug(2, "Cache: %s is not cached", virtualfn)
548 invalid = True
549
550 # If any one of the variants is not present, mark as invalid for all
551 if invalid:
552 for cls in info_array[0].variants:
553 virtualfn = self.realfn2virtual(fn, cls)
554 if virtualfn in self.clean:
555 logger.debug(2, "Cache: Removing %s from cache", virtualfn)
556 self.clean.remove(virtualfn)
557 if fn in self.clean:
558 logger.debug(2, "Cache: Marking %s as not clean", fn)
559 self.clean.remove(fn)
560 return False
561
562 self.clean.add(fn)
563 return True
564
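# cacheValidUpdate() above boils down to: an entry is reusable only while the
# recipe's own mtime, the mtimes of everything it pulled in, and its appends
# list still match what was recorded at parse time.  A simplified standalone
# sketch of that staleness test (plain stand-in structures, not bb.cache types):

import os

def _entry_still_valid(path, recorded_mtime, recorded_deps):
    # recorded_deps is a list of (dependency_path, mtime_at_parse_time) pairs,
    # analogous to info_array[0].file_depends above.
    try:
        if os.stat(path).st_mtime != recorded_mtime:
            return False          # the recipe itself changed
    except OSError:
        return False              # the recipe no longer exists
    for dep, old_mtime in recorded_deps:
        try:
            cur = os.stat(dep).st_mtime
        except OSError:
            cur = 0
        if cur != old_mtime:
            return False          # an included file changed or vanished
    return True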
565 def remove(self, fn):
566 """
567 Remove a fn from the cache
568 Called from the parser in error cases
569 """
570 if fn in self.depends_cache:
571 logger.debug(1, "Removing %s from cache", fn)
572 del self.depends_cache[fn]
573 if fn in self.clean:
574 logger.debug(1, "Marking %s as unclean", fn)
575 self.clean.remove(fn)
576
577 def sync(self):
578 """
579 Save the cache
580 Called from the parser when complete (or exiting)
581 """
582
583 if not self.has_cache:
584 return
585
586 if self.cacheclean:
587 logger.debug(2, "Cache is clean, not saving.")
588 return
589
590 file_dict = {}
591 pickler_dict = {}
592 for cache_class in self.caches_array:
593 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
594 cache_class_name = cache_class.__name__
595 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
596 file_dict[cache_class_name] = open(cachefile, "wb")
597 pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
598
599 pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
600 pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
601
602 try:
603 for key, info_array in self.depends_cache.iteritems():
604 for info in info_array:
605 if isinstance(info, RecipeInfoCommon):
606 cache_class_name = info.__class__.__name__
607 pickler_dict[cache_class_name].dump(key)
608 pickler_dict[cache_class_name].dump(info)
609 finally:
610 for cache_class in self.caches_array:
611 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
612 cache_class_name = cache_class.__name__
613 file_dict[cache_class_name].close()
614
615 del self.depends_cache
616
617 @staticmethod
618 def mtime(cachefile):
619 return bb.parse.cached_mtime_noerror(cachefile)
620
621 def add_info(self, filename, info_array, cacheData, parsed=None):
622 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
623 cacheData.add_from_recipeinfo(filename, info_array)
624
625 if not self.has_cache:
626 return
627
628 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
629 if parsed:
630 self.cacheclean = False
631 self.depends_cache[filename] = info_array
632
633 def add(self, file_name, data, cacheData, parsed=None):
634 """
635 Save data we need into the cache
636 """
637
638 realfn = self.virtualfn2realfn(file_name)[0]
639
640 info_array = []
641 for cache_class in self.caches_array:
642 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
643 info_array.append(cache_class(realfn, data))
644 self.add_info(file_name, info_array, cacheData, parsed)
645
646 @staticmethod
647 def load_bbfile(bbfile, appends, config):
648 """
649 Load and parse one .bb build file
650 Return the data and whether parsing resulted in the file being skipped
651 """
652 chdir_back = False
653
654 from bb import data, parse
655
656 # expand tmpdir to include this topdir
657 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
658 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
659 oldpath = os.path.abspath(os.getcwd())
660 parse.cached_mtime_noerror(bbfile_loc)
661 bb_data = data.init_db(config)
662 # The ConfHandler first looks if there is a TOPDIR and if not
663 # then it would call getcwd().
664 # Previously, we chdir()ed to bbfile_loc, called the handler
665 # and finally chdir()ed back, a couple of thousand times. We now
666 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
667 if not data.getVar('TOPDIR', bb_data):
668 chdir_back = True
669 data.setVar('TOPDIR', bbfile_loc, bb_data)
670 try:
671 if appends:
672 data.setVar('__BBAPPEND', " ".join(appends), bb_data)
673 bb_data = parse.handle(bbfile, bb_data)
674 if chdir_back:
675 os.chdir(oldpath)
676 return bb_data
677 except:
678 if chdir_back:
679 os.chdir(oldpath)
680 raise
681
682
683def init(cooker):
684 """
685 The Objective: Cache the minimum amount of data possible yet get to the
686 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
687
688 To do this, we intercept getVar calls and only cache the variables we see
689 being accessed. We rely on getVar calls being made for all
690 variables bitbake might need to use to reach this stage. For each cached
691 file we need to track:
692
693 * Its mtime
694 * The mtimes of all its dependencies
695 * Whether it caused a parse.SkipPackage exception
696
697 Files causing parsing errors are evicted from the cache.
698
699 """
700 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
701
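# The docstring above describes the core trick: record only the variables that
# are actually read while parsing.  A rough standalone illustration of that
# idea (a hypothetical wrapper class, not the datastore bitbake actually uses):

class _AccessTrackingStore(object):
    # Wrap a plain dict and remember which variables were read, so only
    # those would need to be cached per recipe.
    def __init__(self, values):
        self._values = values
        self.accessed = set()

    def getVar(self, name, expand=False):
        self.accessed.add(name)
        return self._values.get(name)

d = _AccessTrackingStore({"PN": "foo", "PV": "1.0", "UNUSED": "x"})
d.getVar("PN"), d.getVar("PV")
assert d.accessed == {"PN", "PV"}   # only the accessed variables need caching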
702
703class CacheData(object):
704 """
705 The data structures we compile from the cached data
706 """
707
708 def __init__(self, caches_array):
709 self.caches_array = caches_array
710 for cache_class in self.caches_array:
711 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
712 cache_class.init_cacheData(self)
713
714 # Direct cache variables
715 self.task_queues = {}
716 self.preferred = {}
717 self.tasks = {}
718 # Indirect Cache variables (set elsewhere)
719 self.ignored_dependencies = []
720 self.world_target = set()
721 self.bbfile_priority = {}
722
723 def add_from_recipeinfo(self, fn, info_array):
724 for info in info_array:
725 info.add_cacheData(self, fn)
726
727class MultiProcessCache(object):
728 """
729 BitBake multi-process cache implementation
730
731 Used by the codeparser & file checksum caches
732 """
733
734 def __init__(self):
735 self.cachefile = None
736 self.cachedata = self.create_cachedata()
737 self.cachedata_extras = self.create_cachedata()
738
739 def init_cache(self, d):
740 cachedir = (d.getVar("PERSISTENT_DIR", True) or
741 d.getVar("CACHE", True))
742 if cachedir in [None, '']:
743 return
744 bb.utils.mkdirhier(cachedir)
745 self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
746 logger.debug(1, "Using cache in '%s'", self.cachefile)
747
748 glf = bb.utils.lockfile(self.cachefile + ".lock")
749
750 try:
751 with open(self.cachefile, "rb") as f:
752 p = pickle.Unpickler(f)
753 data, version = p.load()
754 except:
755 bb.utils.unlockfile(glf)
756 return
757
758 bb.utils.unlockfile(glf)
759
760 if version != self.__class__.CACHE_VERSION:
761 return
762
763 self.cachedata = data
764
765 def internSet(self, items):
766 new = set()
767 for i in items:
768 new.add(intern(i))
769 return new
770
771 def compress_keys(self, data):
772 # Override in subclasses if desired
773 return
774
775 def create_cachedata(self):
776 data = [{}]
777 return data
778
779 def save_extras(self, d):
780 if not self.cachefile:
781 return
782
783 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
784
785 i = os.getpid()
786 lf = None
787 while not lf:
788 lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
789 if not lf or os.path.exists(self.cachefile + "-" + str(i)):
790 if lf:
791 bb.utils.unlockfile(lf)
792 lf = None
793 i = i + 1
794 continue
795
796 with open(self.cachefile + "-" + str(i), "wb") as f:
797 p = pickle.Pickler(f, -1)
798 p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
799
800 bb.utils.unlockfile(lf)
801 bb.utils.unlockfile(glf)
802
803 def merge_data(self, source, dest):
804 for j in range(0,len(dest)):
805 for h in source[j]:
806 if h not in dest[j]:
807 dest[j][h] = source[j][h]
808
809 def save_merge(self, d):
810 if not self.cachefile:
811 return
812
813 glf = bb.utils.lockfile(self.cachefile + ".lock")
814
815 try:
816 with open(self.cachefile, "rb") as f:
817 p = pickle.Unpickler(f)
818 data, version = p.load()
819 except (IOError, EOFError):
820 data, version = None, None
821
822 if version != self.__class__.CACHE_VERSION:
823 data = self.create_cachedata()
824
825 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
826 f = os.path.join(os.path.dirname(self.cachefile), f)
827 try:
828 with open(f, "rb") as fd:
829 p = pickle.Unpickler(fd)
830 extradata, version = p.load()
831 except (IOError, EOFError):
832 extradata, version = self.create_cachedata(), None
833
834 if version != self.__class__.CACHE_VERSION:
835 continue
836
837 self.merge_data(extradata, data)
838 os.unlink(f)
839
840 self.compress_keys(data)
841
842 with open(self.cachefile, "wb") as f:
843 p = pickle.Pickler(f, -1)
844 p.dump([data, self.__class__.CACHE_VERSION])
845
846 bb.utils.unlockfile(glf)
847
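# MultiProcessCache keeps a shared 'cachedata' plus per-process
# 'cachedata_extras'; save_extras() writes each process's extras to its own
# file and save_merge() folds them back in via merge_data(), which never
# overwrites an existing entry.  The merge rule, shown standalone on plain
# dicts (same shape as create_cachedata() returns):

def _merge_sketch(source, dest):
    # Same rule as MultiProcessCache.merge_data(): copy entries from the
    # per-process extras into the shared data, keeping existing entries.
    for j in range(len(dest)):
        for h in source[j]:
            if h not in dest[j]:
                dest[j][h] = source[j][h]

shared = [{"a": 1}]
extras = [{"a": 99, "b": 2}]      # "a" is already known, so it is kept as-is
_merge_sketch(extras, shared)
assert shared == [{"a": 1, "b": 2}]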
diff --git a/bitbake/lib/bb/cache_extra.py b/bitbake/lib/bb/cache_extra.py
new file mode 100644
index 0000000000..83f4959d6c
--- /dev/null
+++ b/bitbake/lib/bb/cache_extra.py
@@ -0,0 +1,75 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# All extra RecipeInfo classes are defined in this file. Currently,
5# only Hob (the Image Creator) requests extra fields, so only
6# HobRecipeInfo is defined. It is named HobRecipeInfo because it
7# was introduced by 'hob'. Users can also introduce other
8# RecipeInfo classes or simply use those already defined.
9# In a following patch, this newly defined extra RecipeInfo
10# will be dynamically loaded and used for loading/saving the extra
11# cache fields
12
13# Copyright (C) 2011, Intel Corporation. All rights reserved.
14
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28from bb.cache import RecipeInfoCommon
29
30class HobRecipeInfo(RecipeInfoCommon):
31 __slots__ = ()
32
33 classname = "HobRecipeInfo"
34 # please override this member with the correct data cache file name,
35 # e.g. bb_cache.dat or bb_extracache_hob.dat
36 cachefile = "bb_extracache_" + classname +".dat"
37
38 # override this member with the list of extra cache fields
39 # that this class will provide
40 cachefields = ['summary', 'license', 'section',
41 'description', 'homepage', 'bugtracker',
42 'prevision', 'files_info']
43
44 def __init__(self, filename, metadata):
45
46 self.summary = self.getvar('SUMMARY', metadata)
47 self.license = self.getvar('LICENSE', metadata)
48 self.section = self.getvar('SECTION', metadata)
49 self.description = self.getvar('DESCRIPTION', metadata)
50 self.homepage = self.getvar('HOMEPAGE', metadata)
51 self.bugtracker = self.getvar('BUGTRACKER', metadata)
52 self.prevision = self.getvar('PR', metadata)
53 self.files_info = self.getvar('FILES_INFO', metadata)
54
55 @classmethod
56 def init_cacheData(cls, cachedata):
57 # CacheData in Hob RecipeInfo Class
58 cachedata.summary = {}
59 cachedata.license = {}
60 cachedata.section = {}
61 cachedata.description = {}
62 cachedata.homepage = {}
63 cachedata.bugtracker = {}
64 cachedata.prevision = {}
65 cachedata.files_info = {}
66
67 def add_cacheData(self, cachedata, fn):
68 cachedata.summary[fn] = self.summary
69 cachedata.license[fn] = self.license
70 cachedata.section[fn] = self.section
71 cachedata.description[fn] = self.description
72 cachedata.homepage[fn] = self.homepage
73 cachedata.bugtracker[fn] = self.bugtracker
74 cachedata.prevision[fn] = self.prevision
75 cachedata.files_info[fn] = self.files_info
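# Extra caches like HobRecipeInfo are referenced elsewhere in this patch by
# 'module:ClassName' strings (e.g. 'bb.cache_extra:HobRecipeInfo') and are
# imported dynamically.  The same lookup pattern, demonstrated with a stdlib
# class so the sketch runs standalone:

def _load_class(spec):
    # Same split/__import__/getattr pattern the cooker applies to each
    # caches_name_array entry.
    module_name, class_name = spec.split(':')
    module = __import__(module_name, fromlist=(class_name,))
    return getattr(module, class_name)

OrderedDict = _load_class("collections:OrderedDict")
assert OrderedDict.__name__ == "OrderedDict"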
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
new file mode 100644
index 0000000000..514ff0b1e6
--- /dev/null
+++ b/bitbake/lib/bb/checksum.py
@@ -0,0 +1,90 @@
1# Local file checksum cache implementation
2#
3# Copyright (C) 2012 Intel Corporation
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as
7# published by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License along
15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
18import os
19import stat
20import bb.utils
21import logging
22from bb.cache import MultiProcessCache
23
24logger = logging.getLogger("BitBake.Cache")
25
26try:
27 import cPickle as pickle
28except ImportError:
29 import pickle
30 logger.info("Importing cPickle failed. "
31 "Falling back to a very slow implementation.")
32
33
34# mtime cache (non-persistent)
35# based upon the assumption that files do not change during bitbake run
36class FileMtimeCache(object):
37 cache = {}
38
39 def cached_mtime(self, f):
40 if f not in self.cache:
41 self.cache[f] = os.stat(f)[stat.ST_MTIME]
42 return self.cache[f]
43
44 def cached_mtime_noerror(self, f):
45 if f not in self.cache:
46 try:
47 self.cache[f] = os.stat(f)[stat.ST_MTIME]
48 except OSError:
49 return 0
50 return self.cache[f]
51
52 def update_mtime(self, f):
53 self.cache[f] = os.stat(f)[stat.ST_MTIME]
54 return self.cache[f]
55
56 def clear(self):
57 self.cache.clear()
58
59# Checksum + mtime cache (persistent)
60class FileChecksumCache(MultiProcessCache):
61 cache_file_name = "local_file_checksum_cache.dat"
62 CACHE_VERSION = 1
63
64 def __init__(self):
65 self.mtime_cache = FileMtimeCache()
66 MultiProcessCache.__init__(self)
67
68 def get_checksum(self, f):
69 entry = self.cachedata[0].get(f)
70 cmtime = self.mtime_cache.cached_mtime(f)
71 if entry:
72 (mtime, hashval) = entry
73 if cmtime == mtime:
74 return hashval
75 else:
76 bb.debug(2, "file %s changed mtime, recompute checksum" % f)
77
78 hashval = bb.utils.md5_file(f)
79 self.cachedata_extras[0][f] = (cmtime, hashval)
80 return hashval
81
82 def merge_data(self, source, dest):
83 for h in source[0]:
84 if h in dest[0]:
85 (smtime, _) = source[0][h]
86 (dmtime, _) = dest[0][h]
87 if smtime > dmtime:
88 dest[0][h] = source[0][h]
89 else:
90 dest[0][h] = source[0][h]
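# FileChecksumCache.get_checksum() only rehashes a file when its cached mtime
# no longer matches.  The same mtime-gated hashing, sketched standalone with
# hashlib (bb.utils.md5_file is the helper the real code uses); names below
# are illustrative only:

import hashlib
import os

_checksum_cache = {}   # path -> (mtime, hexdigest), like cachedata[0] above

def _get_checksum_sketch(path):
    mtime = os.stat(path).st_mtime
    entry = _checksum_cache.get(path)
    if entry and entry[0] == mtime:
        return entry[1]                      # unchanged since it was last hashed
    with open(path, "rb") as f:
        digest = hashlib.md5(f.read()).hexdigest()
    _checksum_cache[path] = (mtime, digest)  # would go to cachedata_extras
    return digest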
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
new file mode 100644
index 0000000000..a50b9f268a
--- /dev/null
+++ b/bitbake/lib/bb/codeparser.py
@@ -0,0 +1,328 @@
1import ast
2import codegen
3import logging
4import os.path
5import bb.utils, bb.data
6from itertools import chain
7from pysh import pyshyacc, pyshlex, sherrors
8from bb.cache import MultiProcessCache
9
10
11logger = logging.getLogger('BitBake.CodeParser')
12
13try:
14 import cPickle as pickle
15except ImportError:
16 import pickle
17 logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
18
19
20def check_indent(codestr):
21 """If the code is indented, add a top level piece of code to 'remove' the indentation"""
22
23 i = 0
24 while codestr[i] in ["\n", "\t", " "]:
25 i = i + 1
26
27 if i == 0:
28 return codestr
29
30 if codestr[i-1] == "\t" or codestr[i-1] == " ":
31 return "if 1:\n" + codestr
32
33 return codestr
34
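# check_indent() exists so that python fragments taken from indented metadata
# still compile: wrapping them in "if 1:" makes the leading indentation legal.
# For example:

snippet = "    x = 1\n    y = x + 1\n"            # indented as it appears in a recipe
compile("if 1:\n" + snippet, "<string>", "exec")  # compiles fine with the wrapper
try:
    compile(snippet, "<string>", "exec")          # without it...
except IndentationError:
    pass                                          # ...the bare fragment does not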
35
36class CodeParserCache(MultiProcessCache):
37 cache_file_name = "bb_codeparser.dat"
38 CACHE_VERSION = 4
39
40 def __init__(self):
41 MultiProcessCache.__init__(self)
42 self.pythoncache = self.cachedata[0]
43 self.shellcache = self.cachedata[1]
44 self.pythoncacheextras = self.cachedata_extras[0]
45 self.shellcacheextras = self.cachedata_extras[1]
46
47 def init_cache(self, d):
48 MultiProcessCache.init_cache(self, d)
49
50 # cachedata gets re-assigned in the parent
51 self.pythoncache = self.cachedata[0]
52 self.shellcache = self.cachedata[1]
53
54 def compress_keys(self, data):
55 # When the dicts are originally created, python calls intern() on the set keys
56 # which significantly improves memory usage. Sadly the pickle/unpickle process
57 # doesn't call intern() on the keys and results in the same strings being duplicated
58 # in memory. This also means pickle will save the same string multiple times in
59 # the cache file. By interning the data here, the cache file shrinks dramatically
60 # meaning faster load times and the reloaded cache files also consume much less
61 # memory. This is worth any performance hit from these loops and the use of
62 # intern() for the stored data.
63 # Python 3.x may behave better in this area
64 for h in data[0]:
65 data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
66 data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
67 for k in data[0][h]["contains"]:
68 data[0][h]["contains"][k] = self.internSet(data[0][h]["contains"][k])
69 for h in data[1]:
70 data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
71 return
72
73 def create_cachedata(self):
74 data = [{}, {}]
75 return data
76
77codeparsercache = CodeParserCache()
78
79def parser_cache_init(d):
80 codeparsercache.init_cache(d)
81
82def parser_cache_save(d):
83 codeparsercache.save_extras(d)
84
85def parser_cache_savemerge(d):
86 codeparsercache.save_merge(d)
87
88Logger = logging.getLoggerClass()
89class BufferedLogger(Logger):
90 def __init__(self, name, level=0, target=None):
91 Logger.__init__(self, name)
92 self.setLevel(level)
93 self.buffer = []
94 self.target = target
95
96 def handle(self, record):
97 self.buffer.append(record)
98
99 def flush(self):
100 for record in self.buffer:
101 self.target.handle(record)
102 self.buffer = []
103
104class PythonParser():
105 getvars = ("d.getVar", "bb.data.getVar", "data.getVar", "d.appendVar", "d.prependVar")
106 containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
107 execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
108
109 def warn(self, func, arg):
110 """Warn about calls of bitbake APIs which pass a non-literal
111 argument for the variable name, as we're not able to track such
112 a reference.
113 """
114
115 try:
116 funcstr = codegen.to_source(func)
117 argstr = codegen.to_source(arg)
118 except TypeError:
119 self.log.debug(2, 'Failed to convert function and argument to source form')
120 else:
121 self.log.debug(1, self.unhandled_message % (funcstr, argstr))
122
123 def visit_Call(self, node):
124 name = self.called_node_name(node.func)
125 if name in self.getvars or name in self.containsfuncs:
126 if isinstance(node.args[0], ast.Str):
127 varname = node.args[0].s
128 if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
129 if varname not in self.contains:
130 self.contains[varname] = set()
131 self.contains[varname].add(node.args[1].s)
132 else:
133 self.references.add(node.args[0].s)
134 else:
135 self.warn(node.func, node.args[0])
136 elif name in self.execfuncs:
137 if isinstance(node.args[0], ast.Str):
138 self.var_execs.add(node.args[0].s)
139 else:
140 self.warn(node.func, node.args[0])
141 elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
142 self.execs.add(name)
143
144 def called_node_name(self, node):
145 """Given a called node, return its original string form"""
146 components = []
147 while node:
148 if isinstance(node, ast.Attribute):
149 components.append(node.attr)
150 node = node.value
151 elif isinstance(node, ast.Name):
152 components.append(node.id)
153 return '.'.join(reversed(components))
154 else:
155 break
156
157 def __init__(self, name, log):
158 self.var_execs = set()
159 self.contains = {}
160 self.execs = set()
161 self.references = set()
162 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
163
164 self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
165 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
166
167 def parse_python(self, node):
168 h = hash(str(node))
169
170 if h in codeparsercache.pythoncache:
171 self.references = codeparsercache.pythoncache[h]["refs"]
172 self.execs = codeparsercache.pythoncache[h]["execs"]
173 self.contains = codeparsercache.pythoncache[h]["contains"]
174 return
175
176 if h in codeparsercache.pythoncacheextras:
177 self.references = codeparsercache.pythoncacheextras[h]["refs"]
178 self.execs = codeparsercache.pythoncacheextras[h]["execs"]
179 self.contains = codeparsercache.pythoncacheextras[h]["contains"]
180 return
181
182 code = compile(check_indent(str(node)), "<string>", "exec",
183 ast.PyCF_ONLY_AST)
184
185 for n in ast.walk(code):
186 if n.__class__.__name__ == "Call":
187 self.visit_Call(n)
188
189 self.execs.update(self.var_execs)
190
191 codeparsercache.pythoncacheextras[h] = {}
192 codeparsercache.pythoncacheextras[h]["refs"] = self.references
193 codeparsercache.pythoncacheextras[h]["execs"] = self.execs
194 codeparsercache.pythoncacheextras[h]["contains"] = self.contains
195
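# PythonParser.visit_Call() walks the AST looking for d.getVar()-style calls
# with literal variable names; those become the "refs" stored in the cache.
# A cut-down standalone version of that walk (illustrative only):

import ast

def _literal_getvar_refs(code):
    # Collect string literals passed as the first argument to *.getVar(...),
    # roughly what visit_Call() records in self.references.
    refs = set()
    for node in ast.walk(ast.parse(code)):
        if (isinstance(node, ast.Call)
                and isinstance(node.func, ast.Attribute)
                and node.func.attr == "getVar"
                and node.args and isinstance(node.args[0], ast.Str)):
            refs.add(node.args[0].s)
    return refs

assert _literal_getvar_refs("pn = d.getVar('PN', True)\nd.getVar(x, True)") == {"PN"}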
196class ShellParser():
197 def __init__(self, name, log):
198 self.funcdefs = set()
199 self.allexecs = set()
200 self.execs = set()
201 self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
202 self.unhandled_template = "unable to handle non-literal command '%s'"
203 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
204
205 def parse_shell(self, value):
206 """Parse the supplied shell code in a string, returning the external
207 commands it executes.
208 """
209
210 h = hash(str(value))
211
212 if h in codeparsercache.shellcache:
213 self.execs = codeparsercache.shellcache[h]["execs"]
214 return self.execs
215
216 if h in codeparsercache.shellcacheextras:
217 self.execs = codeparsercache.shellcacheextras[h]["execs"]
218 return self.execs
219
220 try:
221 tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
222 except pyshlex.NeedMore:
223 raise sherrors.ShellSyntaxError("Unexpected EOF")
224
225 for token in tokens:
226 self.process_tokens(token)
227 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
228
229 codeparsercache.shellcacheextras[h] = {}
230 codeparsercache.shellcacheextras[h]["execs"] = self.execs
231
232 return self.execs
233
234 def process_tokens(self, tokens):
235 """Process a supplied portion of the syntax tree as returned by
236 pyshyacc.parse.
237 """
238
239 def function_definition(value):
240 self.funcdefs.add(value.name)
241 return [value.body], None
242
243 def case_clause(value):
244 # Element 0 of each item in the case is the list of patterns, and
245 # Element 1 of each item in the case is the list of commands to be
246 # executed when that pattern matches.
247 words = chain(*[item[0] for item in value.items])
248 cmds = chain(*[item[1] for item in value.items])
249 return cmds, words
250
251 def if_clause(value):
252 main = chain(value.cond, value.if_cmds)
253 rest = value.else_cmds
254 if isinstance(rest, tuple) and rest[0] == "elif":
255 return chain(main, if_clause(rest[1]))
256 else:
257 return chain(main, rest)
258
259 def simple_command(value):
260 return None, chain(value.words, (assign[1] for assign in value.assigns))
261
262 token_handlers = {
263 "and_or": lambda x: ((x.left, x.right), None),
264 "async": lambda x: ([x], None),
265 "brace_group": lambda x: (x.cmds, None),
266 "for_clause": lambda x: (x.cmds, x.items),
267 "function_definition": function_definition,
268 "if_clause": lambda x: (if_clause(x), None),
269 "pipeline": lambda x: (x.commands, None),
270 "redirect_list": lambda x: ([x.cmd], None),
271 "subshell": lambda x: (x.cmds, None),
272 "while_clause": lambda x: (chain(x.condition, x.cmds), None),
273 "until_clause": lambda x: (chain(x.condition, x.cmds), None),
274 "simple_command": simple_command,
275 "case_clause": case_clause,
276 }
277
278 for token in tokens:
279 name, value = token
280 try:
281 more_tokens, words = token_handlers[name](value)
282 except KeyError:
283 raise NotImplementedError("Unsupported token type " + name)
284
285 if more_tokens:
286 self.process_tokens(more_tokens)
287
288 if words:
289 self.process_words(words)
290
291 def process_words(self, words):
292 """Process a set of 'words' in pyshyacc parlance, which includes
293 extraction of executed commands from $() blocks, as well as grabbing
294 the command name argument.
295 """
296
297 words = list(words)
298 for word in list(words):
299 wtree = pyshlex.make_wordtree(word[1])
300 for part in wtree:
301 if not isinstance(part, list):
302 continue
303
304 if part[0] in ('`', '$('):
305 command = pyshlex.wordtree_as_string(part[1:-1])
306 self.parse_shell(command)
307
308 if word[0] in ("cmd_name", "cmd_word"):
309 if word in words:
310 words.remove(word)
311
312 usetoken = False
313 for word in words:
314 if word[0] in ("cmd_name", "cmd_word") or \
315 (usetoken and word[0] == "TOKEN"):
316 if "=" in word[1]:
317 usetoken = True
318 continue
319
320 cmd = word[1]
321 if cmd.startswith("$"):
322 self.log.debug(1, self.unhandled_template % cmd)
323 elif cmd == "eval":
324 command = " ".join(word for _, word in words[1:])
325 self.parse_shell(command)
326 else:
327 self.allexecs.add(cmd)
328 break
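# ShellParser.parse_shell() uses the bundled pysh parser to find every external
# command a shell function runs, including inside $() and ``.  A deliberately
# naive stdlib-only approximation, just to show the kind of result it produces
# (the real parser also handles functions, pipelines, subshells and so on):

import shlex

def _naive_shell_execs(script):
    # Very rough stand-in: take the first word of each simple command,
    # skipping comments and leading variable assignments.
    execs = set()
    for line in script.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        words = shlex.split(line)
        if words and "=" not in words[0]:
            execs.add(words[0])
    return execs

assert _naive_shell_execs("MAKEFLAGS=-j4\nmake install\ninstall -d ${D}") == {"make", "install"}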
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
new file mode 100644
index 0000000000..84fcdf9433
--- /dev/null
+++ b/bitbake/lib/bb/command.py
@@ -0,0 +1,444 @@
1"""
2BitBake 'Command' module
3
4Provide an interface to interact with the bitbake server through 'commands'
5"""
6
7# Copyright (C) 2006-2007 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22"""
23The bitbake server takes 'commands' from its UI/commandline.
24Commands are either synchronous or asynchronous.
25Async commands return data to the client in the form of events.
26Sync commands must only return data through the function return value
27and must not trigger events, directly or indirectly.
28Commands are queued in a CommandQueue
29"""
30
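# The split described above (sync commands answer immediately, async ones reply
# through events) is driven by simple name lookup in Command.runCommand() below.
# The dispatch idea in miniature, with a made-up command class:

class _SyncCommandsSketch(object):
    # Hypothetical stand-in for CommandsSync: plain methods looked up by name.
    def ping(self, params):
        return "pong"
    ping.readonly = True      # same convention as getVariable.readonly below

def _run_sync_sketch(cmds, commandline):
    name = commandline.pop(0)
    if hasattr(cmds, name):
        return getattr(cmds, name)(commandline), None   # (result, error)
    return None, "No such command"

assert _run_sync_sketch(_SyncCommandsSketch(), ["ping"]) == ("pong", None)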
31import bb.event
32import bb.cooker
33
34class CommandCompleted(bb.event.Event):
35 pass
36
37class CommandExit(bb.event.Event):
38 def __init__(self, exitcode):
39 bb.event.Event.__init__(self)
40 self.exitcode = int(exitcode)
41
42class CommandFailed(CommandExit):
43 def __init__(self, message):
44 self.error = message
45 CommandExit.__init__(self, 1)
46
47class CommandError(Exception):
48 pass
49
50class Command:
51 """
52 A queue of asynchronous commands for bitbake
53 """
54 def __init__(self, cooker):
55 self.cooker = cooker
56 self.cmds_sync = CommandsSync()
57 self.cmds_async = CommandsAsync()
58
59 # FIXME Add lock for this
60 self.currentAsyncCommand = None
61
62 def runCommand(self, commandline, ro_only = False):
63 command = commandline.pop(0)
64 if hasattr(CommandsSync, command):
65 # Can run synchronous commands straight away
66 command_method = getattr(self.cmds_sync, command)
67 if ro_only:
68 if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
69 return None, "Not able to execute non-readonly commands in readonly mode"
70 try:
71 result = command_method(self, commandline)
72 except CommandError as exc:
73 return None, exc.args[0]
74 except Exception:
75 import traceback
76 return None, traceback.format_exc()
77 else:
78 return result, None
79 if self.currentAsyncCommand is not None:
80 return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
81 if command not in CommandsAsync.__dict__:
82 return None, "No such command"
83 self.currentAsyncCommand = (command, commandline)
84 self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
85 return True, None
86
87 def runAsyncCommand(self):
88 try:
89 if self.cooker.state == bb.cooker.state.error:
90 return False
91 if self.currentAsyncCommand is not None:
92 (command, options) = self.currentAsyncCommand
93 commandmethod = getattr(CommandsAsync, command)
94 needcache = getattr( commandmethod, "needcache" )
95 if needcache and self.cooker.state != bb.cooker.state.running:
96 self.cooker.updateCache()
97 return True
98 else:
99 commandmethod(self.cmds_async, self, options)
100 return False
101 else:
102 return False
103 except KeyboardInterrupt as exc:
104 self.finishAsyncCommand("Interrupted")
105 return False
106 except SystemExit as exc:
107 arg = exc.args[0]
108 if isinstance(arg, basestring):
109 self.finishAsyncCommand(arg)
110 else:
111 self.finishAsyncCommand("Exited with %s" % arg)
112 return False
113 except Exception as exc:
114 import traceback
115 if isinstance(exc, bb.BBHandledException):
116 self.finishAsyncCommand("")
117 else:
118 self.finishAsyncCommand(traceback.format_exc())
119 return False
120
121 def finishAsyncCommand(self, msg=None, code=None):
122 if msg or msg == "":
123 bb.event.fire(CommandFailed(msg), self.cooker.event_data)
124 elif code:
125 bb.event.fire(CommandExit(code), self.cooker.event_data)
126 else:
127 bb.event.fire(CommandCompleted(), self.cooker.event_data)
128 self.currentAsyncCommand = None
129 self.cooker.finishcommand()
130
131class CommandsSync:
132 """
133 A class of synchronous commands
134 These should run quickly so as not to hurt interactive performance.
135 These must not influence any running asynchronous command.
136 """
137
138 def stateShutdown(self, command, params):
139 """
140 Trigger cooker 'shutdown' mode
141 """
142 command.cooker.shutdown(False)
143
144 def stateForceShutdown(self, command, params):
145 """
146 Stop the cooker
147 """
148 command.cooker.shutdown(True)
149
150 def getAllKeysWithFlags(self, command, params):
151 """
152 Returns a dump of the global state. Call with
153 variable flags to be retrieved as params.
154 """
155 flaglist = params[0]
156 return command.cooker.getAllKeysWithFlags(flaglist)
157 getAllKeysWithFlags.readonly = True
158
159 def getVariable(self, command, params):
160 """
161 Read the value of a variable from data
162 """
163 varname = params[0]
164 expand = True
165 if len(params) > 1:
166 expand = (params[1] == "True")
167
168 return command.cooker.data.getVar(varname, expand)
169 getVariable.readonly = True
170
171 def setVariable(self, command, params):
172 """
173 Set the value of variable in data
174 """
175 varname = params[0]
176 value = str(params[1])
177 command.cooker.data.setVar(varname, value)
178
179 def setConfig(self, command, params):
180 """
181 Set the value of variable in configuration
182 """
183 varname = params[0]
184 value = str(params[1])
185 setattr(command.cooker.configuration, varname, value)
186
187 def enableDataTracking(self, command, params):
188 """
189 Enable history tracking for variables
190 """
191 command.cooker.enableDataTracking()
192
193 def disableDataTracking(self, command, params):
194 """
195 Disable history tracking for variables
196 """
197 command.cooker.disableDataTracking()
198
199 def setPrePostConfFiles(self, command, params):
200 prefiles = params[0].split()
201 postfiles = params[1].split()
202 command.cooker.configuration.prefile = prefiles
203 command.cooker.configuration.postfile = postfiles
204
205 def getCpuCount(self, command, params):
206 """
207 Get the CPU count on the bitbake server
208 """
209 return bb.utils.cpu_count()
210 getCpuCount.readonly = True
211
212 def matchFile(self, command, params):
213 fMatch = params[0]
214 return command.cooker.matchFile(fMatch)
215
216 def generateNewImage(self, command, params):
217 image = params[0]
218 base_image = params[1]
219 package_queue = params[2]
220 timestamp = params[3]
221 description = params[4]
222 return command.cooker.generateNewImage(image, base_image,
223 package_queue, timestamp, description)
224
225 def ensureDir(self, command, params):
226 directory = params[0]
227 bb.utils.mkdirhier(directory)
228
229 def setVarFile(self, command, params):
230 """
231 Save a variable in a file; used for saving in a configuration file
232 """
233 var = params[0]
234 val = params[1]
235 default_file = params[2]
236 op = params[3]
237 command.cooker.modifyConfigurationVar(var, val, default_file, op)
238
239 def removeVarFile(self, command, params):
240 """
241 Remove a variable declaration from a file
242 """
243 var = params[0]
244 command.cooker.removeConfigurationVar(var)
245
246 def createConfigFile(self, command, params):
247 """
248 Create an extra configuration file
249 """
250 name = params[0]
251 command.cooker.createConfigFile(name)
252
253 def setEventMask(self, command, params):
254 handlerNum = params[0]
255 llevel = params[1]
256 debug_domains = params[2]
257 mask = params[3]
258 return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
259
260 def setFeatures(self, command, params):
261 """
262 Set the cooker features to include the passed list of features
263 """
264 features = params[0]
265 command.cooker.setFeatures(features)
266
267 # although we change the internal state of the cooker, this is transparent since
268 # we always take and leave the cooker in state.initial
269 setFeatures.readonly = True
270
271class CommandsAsync:
272 """
273 A class of asynchronous commands
274 These functions communicate via generated events.
275 Any function that requires metadata parsing should be here.
276 """
277
278 def buildFile(self, command, params):
279 """
280 Build a single specified .bb file
281 """
282 bfile = params[0]
283 task = params[1]
284
285 command.cooker.buildFile(bfile, task)
286 buildFile.needcache = False
287
288 def buildTargets(self, command, params):
289 """
290 Build a set of targets
291 """
292 pkgs_to_build = params[0]
293 task = params[1]
294
295 command.cooker.buildTargets(pkgs_to_build, task)
296 buildTargets.needcache = True
297
298 def generateDepTreeEvent(self, command, params):
299 """
300 Generate an event containing the dependency information
301 """
302 pkgs_to_build = params[0]
303 task = params[1]
304
305 command.cooker.generateDepTreeEvent(pkgs_to_build, task)
306 command.finishAsyncCommand()
307 generateDepTreeEvent.needcache = True
308
309 def generateDotGraph(self, command, params):
310 """
311 Dump dependency information to disk as .dot files
312 """
313 pkgs_to_build = params[0]
314 task = params[1]
315
316 command.cooker.generateDotGraphFiles(pkgs_to_build, task)
317 command.finishAsyncCommand()
318 generateDotGraph.needcache = True
319
320 def generateTargetsTree(self, command, params):
321 """
322 Generate a tree of buildable targets.
323 If klass is provided ensure all recipes that inherit the class are
324 included in the package list.
326 If pkg_list is provided, use that list (plus any extras brought in by
326 klass) rather than generating a tree for all packages.
327 """
328 klass = params[0]
329 pkg_list = params[1]
330
331 command.cooker.generateTargetsTree(klass, pkg_list)
332 command.finishAsyncCommand()
333 generateTargetsTree.needcache = True
334
335 def findCoreBaseFiles(self, command, params):
336 """
337 Find certain files in the COREBASE directory, e.g. layers
338 """
339 subdir = params[0]
340 filename = params[1]
341
342 command.cooker.findCoreBaseFiles(subdir, filename)
343 command.finishAsyncCommand()
344 findCoreBaseFiles.needcache = False
345
346 def findConfigFiles(self, command, params):
347 """
348 Find config files which provide appropriate values
350 for the passed configuration variable, e.g. MACHINE
350 """
351 varname = params[0]
352
353 command.cooker.findConfigFiles(varname)
354 command.finishAsyncCommand()
355 findConfigFiles.needcache = False
356
357 def findFilesMatchingInDir(self, command, params):
358 """
359 Find implementation files matching the specified pattern
360 in the requested subdirectory of a BBPATH
361 """
362 pattern = params[0]
363 directory = params[1]
364
365 command.cooker.findFilesMatchingInDir(pattern, directory)
366 command.finishAsyncCommand()
367 findFilesMatchingInDir.needcache = False
368
369 def findConfigFilePath(self, command, params):
370 """
371 Find the path of the requested configuration file
372 """
373 configfile = params[0]
374
375 command.cooker.findConfigFilePath(configfile)
376 command.finishAsyncCommand()
377 findConfigFilePath.needcache = False
378
379 def showVersions(self, command, params):
380 """
381 Show the currently selected versions
382 """
383 command.cooker.showVersions()
384 command.finishAsyncCommand()
385 showVersions.needcache = True
386
387 def showEnvironmentTarget(self, command, params):
388 """
389 Print the environment of a target recipe
390 (needs the cache to work out which recipe to use)
391 """
392 pkg = params[0]
393
394 command.cooker.showEnvironment(None, pkg)
395 command.finishAsyncCommand()
396 showEnvironmentTarget.needcache = True
397
398 def showEnvironment(self, command, params):
399 """
400 Print the standard environment
401 or if specified the environment for a specified recipe
402 """
403 bfile = params[0]
404
405 command.cooker.showEnvironment(bfile)
406 command.finishAsyncCommand()
407 showEnvironment.needcache = False
408
409 def parseFiles(self, command, params):
410 """
411 Parse the .bb files
412 """
413 command.cooker.updateCache()
414 command.finishAsyncCommand()
415 parseFiles.needcache = True
416
417 def compareRevisions(self, command, params):
418 """
419 Compare the source revisions in the persistent cache with the current values
420 """
421 if bb.fetch.fetcher_compare_revisions(command.cooker.data):
422 command.finishAsyncCommand(code=1)
423 else:
424 command.finishAsyncCommand()
425 compareRevisions.needcache = True
426
427 def triggerEvent(self, command, params):
428 """
429 Trigger a certain event
430 """
431 event = params[0]
432 bb.event.fire(eval(event), command.cooker.data)
433 command.currentAsyncCommand = None
434 triggerEvent.needcache = False
435
436 def resetCooker(self, command, params):
437 """
438 Reset the cooker to its initial state, thus forcing a reparse for
439 any async command that has the needcache property set to True
440 """
441 command.cooker.reset()
442 command.finishAsyncCommand()
443 resetCooker.needcache = False
444
diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py
new file mode 100644
index 0000000000..de1923d28a
--- /dev/null
+++ b/bitbake/lib/bb/compat.py
@@ -0,0 +1,6 @@
1"""Code pulled from future python versions, here for compatibility"""
2
3from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
4from functools import total_ordering
5
6
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
new file mode 100644
index 0000000000..f44a08889a
--- /dev/null
+++ b/bitbake/lib/bb/cooker.py
@@ -0,0 +1,1874 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
39import Queue
40import signal
41import prserv.serv
42
43logger = logging.getLogger("BitBake")
44collectlog = logging.getLogger("BitBake.Collection")
45buildlog = logging.getLogger("BitBake.Build")
46parselog = logging.getLogger("BitBake.Parsing")
47providerlog = logging.getLogger("BitBake.Provider")
48
49class NoSpecificMatch(bb.BBHandledException):
50 """
51 Exception raised when no or multiple file matches are found
52 """
53
54class NothingToBuild(Exception):
55 """
56 Exception raised when there is nothing to build
57 """
58
59class CollectionError(bb.BBHandledException):
60 """
61 Exception raised when layer configuration is incorrect
62 """
63
64class state:
65 initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
66
67
68class SkippedPackage:
69 def __init__(self, info = None, reason = None):
70 self.pn = None
71 self.skipreason = None
72 self.provides = None
73 self.rprovides = None
74
75 if info:
76 self.pn = info.pn
77 self.skipreason = info.skipreason
78 self.provides = info.provides
79 self.rprovides = info.rprovides
80 elif reason:
81 self.skipreason = reason
82
83
84class CookerFeatures(object):
85 _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
86
87 def __init__(self):
88 self._features=set()
89
90 def setFeature(self, f):
91 # validate we got a request for a feature we support
92 if f not in CookerFeatures._feature_list:
93 return
94 self._features.add(f)
95
96 def __contains__(self, f):
97 return f in self._features
98
99 def __iter__(self):
100 return self._features.__iter__()
101
102 def next(self):
103 return self._features.next()
104
105
106#============================================================================#
107# BBCooker
108#============================================================================#
109class BBCooker:
110 """
111 Manages one bitbake build run
112 """
113
114 def __init__(self, configuration, featureSet = []):
115 self.recipecache = None
116 self.skiplist = {}
117 self.featureset = CookerFeatures()
118 for f in featureSet:
119 self.featureset.setFeature(f)
120
121 self.configuration = configuration
122
123 self.initConfigurationData()
124
125 # Take a lock so only one copy of bitbake can run against a given build
126 # directory at a time
127 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
128 self.lock = bb.utils.lockfile(lockfile, False, False)
129 if not self.lock:
130 bb.fatal("Only one copy of bitbake should be run against a build directory")
131 try:
132 self.lock.seek(0)
133 self.lock.truncate()
134 if len(configuration.interface) >= 2:
135 self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]))
136 self.lock.flush()
137 except:
138 pass
139
140 # TOSTOP must not be set or our children will hang when they output
141 fd = sys.stdout.fileno()
142 if os.isatty(fd):
143 import termios
144 tcattr = termios.tcgetattr(fd)
145 if tcattr[3] & termios.TOSTOP:
146 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
147 tcattr[3] = tcattr[3] & ~termios.TOSTOP
148 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
149
150 self.command = bb.command.Command(self)
151 self.state = state.initial
152
153 self.parser = None
154
155 signal.signal(signal.SIGTERM, self.sigterm_exception)
156
157 def sigterm_exception(self, signum, stackframe):
158 bb.warn("Cooker received SIGTERM, shutting down...")
159 self.state = state.forceshutdown
160
161 def setFeatures(self, features):
162 # we only accept a new feature set if we're in state initial, so we can reset without problems
163 if self.state != state.initial:
164 raise Exception("Illegal state for feature set change")
165 original_featureset = list(self.featureset)
166 for feature in features:
167 self.featureset.setFeature(feature)
168 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
169 if (original_featureset != list(self.featureset)):
170 self.reset()
171
172 def initConfigurationData(self):
173
174 self.state = state.initial
175 self.caches_array = []
176
177 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
178 self.enableDataTracking()
179
180 all_extra_cache_names = []
181 # We hardcode all known cache types in a single place, here.
182 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
183 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
184
185 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
186
187 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
188 # This is the entry point, no further check needed!
189 for var in caches_name_array:
190 try:
191 module_name, cache_name = var.split(':')
192 module = __import__(module_name, fromlist=(cache_name,))
193 self.caches_array.append(getattr(module, cache_name))
194 except ImportError as exc:
195 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
196 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
197
198 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
199 self.databuilder.parseBaseConfiguration()
200 self.data = self.databuilder.data
201 self.data_hash = self.databuilder.data_hash
202
203 #
204 # Special updated configuration we use for firing events
205 #
206 self.event_data = bb.data.createCopy(self.data)
207 bb.data.update_data(self.event_data)
208 bb.parse.init_parser(self.event_data)
209
210 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
211 self.disableDataTracking()
212
213 def enableDataTracking(self):
214 self.configuration.tracking = True
215 if hasattr(self, "data"):
216 self.data.enableTracking()
217
218 def disableDataTracking(self):
219 self.configuration.tracking = False
220 if hasattr(self, "data"):
221 self.data.disableTracking()
222
223 def modifyConfigurationVar(self, var, val, default_file, op):
224 if op == "append":
225 self.appendConfigurationVar(var, val, default_file)
226 elif op == "set":
227 self.saveConfigurationVar(var, val, default_file, "=")
228 elif op == "earlyAssign":
229 self.saveConfigurationVar(var, val, default_file, "?=")
230
231
232 def appendConfigurationVar(self, var, val, default_file):
233 #add append var operation to the end of default_file
234 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
235
236 total = "#added by hob"
237 total += "\n%s += \"%s\"\n" % (var, val)
238
239 with open(default_file, 'a') as f:
240 f.write(total)
241
242 #add to history
243 loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
244 self.data.appendVar(var, val, **loginfo)
245
246 def saveConfigurationVar(self, var, val, default_file, op):
247
248 replaced = False
249 #do not save if nothing changed
250 if str(val) == self.data.getVar(var):
251 return
252
253 conf_files = self.data.varhistory.get_variable_files(var)
254
255 #format the value when it is a list
256 if isinstance(val, list):
257 listval = ""
258 for value in val:
259 listval += "%s " % value
260 val = listval
261
262 topdir = self.data.getVar("TOPDIR")
263
264 #comment or replace operations made on var
265 for conf_file in conf_files:
266 if topdir in conf_file:
267 with open(conf_file, 'r') as f:
268 contents = f.readlines()
269
270 lines = self.data.varhistory.get_variable_lines(var, conf_file)
271 for line in lines:
272 total = ""
273 i = 0
274 for c in contents:
275 total += c
276 i = i + 1
277 if i==int(line):
278 end_index = len(total)
279 index = total.rfind(var, 0, end_index)
280
281 begin_line = total.count("\n",0,index)
282 end_line = int(line)
283
284 #check if the variable was previously saved in the same way;
285 #if so, replace the line where the variable was declared,
286 #otherwise comment it out
287 if contents[begin_line-1]== "#added by hob\n":
288 contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
289 replaced = True
290 else:
291 for ii in range(begin_line, end_line):
292 contents[ii] = "#" + contents[ii]
293
294 with open(conf_file, 'w') as f:
295 f.writelines(contents)
296
297 if replaced == False:
298 #remove var from history
299 self.data.varhistory.del_var_history(var)
300
301 #add var to the end of default_file
302 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
303
304 #add the variable on a single line, so it is easy to replace the next time
305 total = "\n#added by hob"
306 total += "\n%s %s \"%s\"\n" % (var, op, val)
307
308 with open(default_file, 'a') as f:
309 f.write(total)
310
311 #add to history
312 loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
313 self.data.setVar(var, val, **loginfo)
314
315 def removeConfigurationVar(self, var):
316 conf_files = self.data.varhistory.get_variable_files(var)
317 topdir = self.data.getVar("TOPDIR")
318
319 for conf_file in conf_files:
320 if topdir in conf_file:
321 with open(conf_file, 'r') as f:
322 contents = f.readlines()
323
324 lines = self.data.varhistory.get_variable_lines(var, conf_file)
325 for line in lines:
326 total = ""
327 i = 0
328 for c in contents:
329 total += c
330 i = i + 1
331 if i==int(line):
332 end_index = len(total)
333 index = total.rfind(var, 0, end_index)
334
335 begin_line = total.count("\n",0,index)
336
337 #check if the variable was saved before in the same way
338 if contents[begin_line-1]== "#added by hob\n":
339 contents[begin_line-1] = contents[begin_line] = "\n"
340 else:
341 contents[begin_line] = "\n"
342 #remove var from history
343 self.data.varhistory.del_var_history(var, conf_file, line)
344 #remove variable
345 self.data.delVar(var)
346
347 with open(conf_file, 'w') as f:
348 f.writelines(contents)
349
350 def createConfigFile(self, name):
351 path = os.getcwd()
352 confpath = os.path.join(path, "conf", name)
353 open(confpath, 'w').close()
354
355 def parseConfiguration(self):
356 # Set log file verbosity
357 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
358 if verboselogs:
359 bb.msg.loggerVerboseLogs = True
360
361 # Change nice level if we're asked to
362 nice = self.data.getVar("BB_NICE_LEVEL", True)
363 if nice:
364 curnice = os.nice(0)
365 nice = int(nice) - curnice
366 buildlog.verbose("Renice to %s " % os.nice(nice))
367
368 if self.recipecache:
369 del self.recipecache
370 self.recipecache = bb.cache.CacheData(self.caches_array)
371
372 self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
373
374 def runCommands(self, server, data, abort):
375 """
376 Run any queued asynchronous command
377 This is done by the idle handler so it runs in true context rather than
378 tied to any UI.
379 """
380
381 return self.command.runAsyncCommand()
382
383 def showVersions(self):
384
385 pkg_pn = self.recipecache.pkg_pn
386 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
387
388 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
389 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
390
391 for p in sorted(pkg_pn):
392 pref = preferred_versions[p]
393 latest = latest_versions[p]
394
395 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
396 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
397
398 if pref == latest:
399 prefstr = ""
400
401 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
402
403 def showEnvironment(self, buildfile = None, pkgs_to_build = []):
404 """
405 Show the outer or per-package environment
406 """
407 fn = None
408 envdata = None
409
410 if buildfile:
411 # Parse the configuration here. We need to do it explicitly here since
412 # this showEnvironment() code path doesn't use the cache
413 self.parseConfiguration()
414
415 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
416 fn = self.matchFile(fn)
417 fn = bb.cache.Cache.realfn2virtual(fn, cls)
418 elif len(pkgs_to_build) == 1:
419 ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
420 if pkgs_to_build[0] in set(ignore.split()):
421 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
422
423 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)
424
425 targetid = taskdata.getbuild_id(pkgs_to_build[0])
426 fnid = taskdata.build_targets[targetid][0]
427 fn = taskdata.fn_index[fnid]
428 else:
429 envdata = self.data
430
431 if fn:
432 try:
433 envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
434 except Exception as e:
435 parselog.exception("Unable to read %s", fn)
436 raise
437
438 # Display history
439 with closing(StringIO()) as env:
440 self.data.inchistory.emit(env)
441 logger.plain(env.getvalue())
442
443 # emit variables and shell functions
444 data.update_data(envdata)
445 with closing(StringIO()) as env:
446 data.emit_env(env, envdata, True)
447 logger.plain(env.getvalue())
448
449 # emit the metadata which isn't valid shell
450 data.expandKeys(envdata)
451 for e in envdata.keys():
452 if data.getVarFlag( e, 'python', envdata ):
453 logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
454
455
456 def buildTaskData(self, pkgs_to_build, task, abort):
457 """
458 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
459 """
460 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
461
462 # A task of None means use the default task
463 if task is None:
464 task = self.configuration.cmd
465
466 fulltargetlist = self.checkPackages(pkgs_to_build)
467
468 localdata = data.createCopy(self.data)
469 bb.data.update_data(localdata)
470 bb.data.expandKeys(localdata)
471 taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)
472
473 current = 0
474 runlist = []
475 for k in fulltargetlist:
476 ktask = task
477 if ":do_" in k:
478 k2 = k.split(":do_")
479 k = k2[0]
480 ktask = k2[1]
481 taskdata.add_provider(localdata, self.recipecache, k)
482 current += 1
483 runlist.append([k, "do_%s" % ktask])
484 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
485 taskdata.add_unresolved(localdata, self.recipecache)
486 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
487 return taskdata, runlist, fulltargetlist
488
489 def prepareTreeData(self, pkgs_to_build, task):
490 """
491 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
492 """
493
494 # We set abort to False here to prevent unbuildable targets raising
495 # an exception when we're just generating data
496 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
497
498 return runlist, taskdata
499
500 ######## WARNING : this function requires cache_extra to be enabled ########
501
502 def generateTaskDepTreeData(self, pkgs_to_build, task):
503 """
504 Create a dependency graph of pkgs_to_build including reverse dependency
505 information.
506 """
507 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
508 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
509 rq.rqdata.prepare()
510 return self.buildDependTree(rq, taskdata)
511
512
513 def buildDependTree(self, rq, taskdata):
514 seen_fnids = []
515 depend_tree = {}
516 depend_tree["depends"] = {}
517 depend_tree["tdepends"] = {}
518 depend_tree["pn"] = {}
519 depend_tree["rdepends-pn"] = {}
520 depend_tree["packages"] = {}
521 depend_tree["rdepends-pkg"] = {}
522 depend_tree["rrecs-pkg"] = {}
523 depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
524
525 for task in xrange(len(rq.rqdata.runq_fnid)):
526 taskname = rq.rqdata.runq_task[task]
527 fnid = rq.rqdata.runq_fnid[task]
528 fn = taskdata.fn_index[fnid]
529 pn = self.recipecache.pkg_fn[fn]
530 version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
531 if pn not in depend_tree["pn"]:
532 depend_tree["pn"][pn] = {}
533 depend_tree["pn"][pn]["filename"] = fn
534 depend_tree["pn"][pn]["version"] = version
535 depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
536
537 # if we have extra caches, list all attributes they bring in
538 extra_info = []
539 for cache_class in self.caches_array:
540 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
541 cachefields = getattr(cache_class, 'cachefields', [])
542 extra_info = extra_info + cachefields
543
544 # for all attributes stored, add them to the dependency tree
545 for ei in extra_info:
546 depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
547
548
549 for dep in rq.rqdata.runq_depends[task]:
550 depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
551 deppn = self.recipecache.pkg_fn[depfn]
552 dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
553 if not dotname in depend_tree["tdepends"]:
554 depend_tree["tdepends"][dotname] = []
555 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
556 if fnid not in seen_fnids:
557 seen_fnids.append(fnid)
558 packages = []
559
560 depend_tree["depends"][pn] = []
561 for dep in taskdata.depids[fnid]:
562 depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
563
564 depend_tree["rdepends-pn"][pn] = []
565 for rdep in taskdata.rdepids[fnid]:
566 depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
567
568 rdepends = self.recipecache.rundeps[fn]
569 for package in rdepends:
570 depend_tree["rdepends-pkg"][package] = []
571 for rdepend in rdepends[package]:
572 depend_tree["rdepends-pkg"][package].append(rdepend)
573 packages.append(package)
574
575 rrecs = self.recipecache.runrecs[fn]
576 for package in rrecs:
577 depend_tree["rrecs-pkg"][package] = []
578 for rdepend in rrecs[package]:
579 depend_tree["rrecs-pkg"][package].append(rdepend)
580 if not package in packages:
581 packages.append(package)
582
583 for package in packages:
584 if package not in depend_tree["packages"]:
585 depend_tree["packages"][package] = {}
586 depend_tree["packages"][package]["pn"] = pn
587 depend_tree["packages"][package]["filename"] = fn
588 depend_tree["packages"][package]["version"] = version
589
590 return depend_tree
591
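
For orientation, here is a hypothetical, abbreviated example of the dictionary buildDependTree() returns. The keys mirror the assignments above; the recipe, package and layer names are invented, and the compiled regex in layer-priorities is elided:

    depend_tree = {
        "pn": {
            "foo": {"filename": "/layers/meta-example/recipes/foo_1.0.bb",
                    "version": "0:1.0-r0",
                    "inherits": ["autotools"]},
        },
        "depends":      {"foo": ["zlib"]},                      # build-time, per recipe
        "rdepends-pn":  {"foo": ["libbar"]},                    # runtime, per recipe
        "tdepends":     {"foo.do_compile": ["zlib.do_populate_sysroot"]},  # task level
        "packages":     {"foo": {"pn": "foo",
                                 "filename": "/layers/meta-example/recipes/foo_1.0.bb",
                                 "version": "0:1.0-r0"}},
        "rdepends-pkg": {"foo": ["libbar"]},                    # runtime, per binary package
        "rrecs-pkg":    {"foo": []},                            # runtime recommendations
        "layer-priorities": [("example", "^/layers/meta-example/", None, 5)],
    }
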
592 ######## WARNING : this function requires cache_extra to be enabled ########
593 def generatePkgDepTreeData(self, pkgs_to_build, task):
594 """
595 Create a dependency tree of pkgs_to_build, returning the data.
596 """
597 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
598 tasks_fnid = []
599 if len(taskdata.tasks_name) != 0:
600 for task in xrange(len(taskdata.tasks_name)):
601 tasks_fnid.append(taskdata.tasks_fnid[task])
602
603 seen_fnids = []
604 depend_tree = {}
605 depend_tree["depends"] = {}
606 depend_tree["pn"] = {}
607 depend_tree["rdepends-pn"] = {}
608 depend_tree["rdepends-pkg"] = {}
609 depend_tree["rrecs-pkg"] = {}
610
611 # if we have extra caches, list all attributes they bring in
612 extra_info = []
613 for cache_class in self.caches_array:
614 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
615 cachefields = getattr(cache_class, 'cachefields', [])
616 extra_info = extra_info + cachefields
617
618 for task in xrange(len(tasks_fnid)):
619 fnid = tasks_fnid[task]
620 fn = taskdata.fn_index[fnid]
621 pn = self.recipecache.pkg_fn[fn]
622
623 if pn not in depend_tree["pn"]:
624 depend_tree["pn"][pn] = {}
625 depend_tree["pn"][pn]["filename"] = fn
626 version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
627 depend_tree["pn"][pn]["version"] = version
628 rdepends = self.recipecache.rundeps[fn]
629 rrecs = self.recipecache.runrecs[fn]
630 depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
631
632 # for all extra attributes stored, add them to the dependency tree
633 for ei in extra_info:
634 depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
635
636 if fnid not in seen_fnids:
637 seen_fnids.append(fnid)
638
639 depend_tree["depends"][pn] = []
640 for dep in taskdata.depids[fnid]:
641 item = taskdata.build_names_index[dep]
642 pn_provider = ""
643 targetid = taskdata.getbuild_id(item)
644 if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
645 id = taskdata.build_targets[targetid][0]
646 fn_provider = taskdata.fn_index[id]
647 pn_provider = self.recipecache.pkg_fn[fn_provider]
648 else:
649 pn_provider = item
650 depend_tree["depends"][pn].append(pn_provider)
651
652 depend_tree["rdepends-pn"][pn] = []
653 for rdep in taskdata.rdepids[fnid]:
654 item = taskdata.run_names_index[rdep]
655 pn_rprovider = ""
656 targetid = taskdata.getrun_id(item)
657 if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
658 id = taskdata.run_targets[targetid][0]
659 fn_rprovider = taskdata.fn_index[id]
660 pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
661 else:
662 pn_rprovider = item
663 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
664
665 depend_tree["rdepends-pkg"].update(rdepends)
666 depend_tree["rrecs-pkg"].update(rrecs)
667
668 return depend_tree
669
670 def generateDepTreeEvent(self, pkgs_to_build, task):
671 """
672 Create a task dependency graph of pkgs_to_build.
673 Generate an event with the result
674 """
675 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
676 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
677
678 def generateDotGraphFiles(self, pkgs_to_build, task):
679 """
680 Create a task dependency graph of pkgs_to_build.
681 Save the result to a set of .dot files.
682 """
683
684 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
685
686        # Prints a flattened form of package-depends below, where subpackages of a package are merged into the main pn
687 depends_file = file('pn-depends.dot', 'w' )
688 buildlist_file = file('pn-buildlist', 'w' )
689 print("digraph depends {", file=depends_file)
690 for pn in depgraph["pn"]:
691 fn = depgraph["pn"][pn]["filename"]
692 version = depgraph["pn"][pn]["version"]
693 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
694 print("%s" % pn, file=buildlist_file)
695 buildlist_file.close()
696 logger.info("PN build list saved to 'pn-buildlist'")
697 for pn in depgraph["depends"]:
698 for depend in depgraph["depends"][pn]:
699 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
700 for pn in depgraph["rdepends-pn"]:
701 for rdepend in depgraph["rdepends-pn"][pn]:
702 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
703 print("}", file=depends_file)
704 logger.info("PN dependencies saved to 'pn-depends.dot'")
705
706 depends_file = file('package-depends.dot', 'w' )
707 print("digraph depends {", file=depends_file)
708 for package in depgraph["packages"]:
709 pn = depgraph["packages"][package]["pn"]
710 fn = depgraph["packages"][package]["filename"]
711 version = depgraph["packages"][package]["version"]
712 if package == pn:
713 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
714 else:
715 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
716 for depend in depgraph["depends"][pn]:
717 print('"%s" -> "%s"' % (package, depend), file=depends_file)
718 for package in depgraph["rdepends-pkg"]:
719 for rdepend in depgraph["rdepends-pkg"][package]:
720 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
721 for package in depgraph["rrecs-pkg"]:
722 for rdepend in depgraph["rrecs-pkg"][package]:
723 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
724 print("}", file=depends_file)
725 logger.info("Package dependencies saved to 'package-depends.dot'")
726
727 tdepends_file = file('task-depends.dot', 'w' )
728 print("digraph depends {", file=tdepends_file)
729 for task in depgraph["tdepends"]:
730 (pn, taskname) = task.rsplit(".", 1)
731 fn = depgraph["pn"][pn]["filename"]
732 version = depgraph["pn"][pn]["version"]
733 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
734 for dep in depgraph["tdepends"][task]:
735 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
736 print("}", file=tdepends_file)
737 logger.info("Task dependencies saved to 'task-depends.dot'")
738
739 def show_appends_with_no_recipes( self ):
740 appends_without_recipes = [self.collection.appendlist[recipe]
741 for recipe in self.collection.appendlist
742 if recipe not in self.collection.appliedappendlist]
743 if appends_without_recipes:
744 appendlines = (' %s' % append
745 for appends in appends_without_recipes
746 for append in appends)
747 msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
748 warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
749 self.data, False) or "no"
750 if warn_only.lower() in ("1", "yes", "true"):
751 bb.warn(msg)
752 else:
753 bb.fatal(msg)
754
755 def handlePrefProviders(self):
756
757 localdata = data.createCopy(self.data)
758 bb.data.update_data(localdata)
759 bb.data.expandKeys(localdata)
760
761 # Handle PREFERRED_PROVIDERS
762 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
763 try:
764 (providee, provider) = p.split(':')
765 except:
766 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
767 continue
768 if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
769 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
770 self.recipecache.preferred[providee] = provider
771
772 def findCoreBaseFiles(self, subdir, configfile):
773 corebase = self.data.getVar('COREBASE', True) or ""
774 paths = []
775 for root, dirs, files in os.walk(corebase + '/' + subdir):
776 for d in dirs:
777 configfilepath = os.path.join(root, d, configfile)
778 if os.path.exists(configfilepath):
779 paths.append(os.path.join(root, d))
780
781 if paths:
782 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
783
784 def findConfigFilePath(self, configfile):
785 """
786 Find the location on disk of configfile and if it exists and was parsed by BitBake
787 emit the ConfigFilePathFound event with the path to the file.
788 """
789 path = bb.cookerdata.findConfigFile(configfile, self.data)
790 if not path:
791 return
792
793 # Generate a list of parsed configuration files by searching the files
794 # listed in the __depends and __base_depends variables with a .conf suffix.
795 conffiles = []
796 dep_files = self.data.getVar('__base_depends') or []
797 dep_files = dep_files + (self.data.getVar('__depends') or [])
798
799 for f in dep_files:
800 if f[0].endswith(".conf"):
801 conffiles.append(f[0])
802
803 _, conf, conffile = path.rpartition("conf/")
804 match = os.path.join(conf, conffile)
805 # Try and find matches for conf/conffilename.conf as we don't always
806 # have the full path to the file.
807 for cfg in conffiles:
808 if cfg.endswith(match):
809 bb.event.fire(bb.event.ConfigFilePathFound(path),
810 self.data)
811 break
812
813 def findFilesMatchingInDir(self, filepattern, directory):
814 """
815        Searches for files matching the regex 'filepattern' which are children of
816        'directory' in each BBPATH, e.g. to find all rootfs package classes available
817 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
818 or to find all machine configuration files one could call:
819 findFilesMatchingInDir(self, 'conf/machines', 'conf')
820 """
821 import re
822
823 matches = []
824 p = re.compile(re.escape(filepattern))
825 bbpaths = self.data.getVar('BBPATH', True).split(':')
826 for path in bbpaths:
827 dirpath = os.path.join(path, directory)
828 if os.path.exists(dirpath):
829 for root, dirs, files in os.walk(dirpath):
830 for f in files:
831 if p.search(f):
832 matches.append(f)
833
834 if matches:
835 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
836
837 def findConfigFiles(self, varname):
838 """
839 Find config files which are appropriate values for varname.
840        e.g. MACHINE, DISTRO
841 """
842 possible = []
843 var = varname.lower()
844
845 data = self.data
846 # iterate configs
847 bbpaths = data.getVar('BBPATH', True).split(':')
848 for path in bbpaths:
849 confpath = os.path.join(path, "conf", var)
850 if os.path.exists(confpath):
851 for root, dirs, files in os.walk(confpath):
852 # get all child files, these are appropriate values
853 for f in files:
854 val, sep, end = f.rpartition('.')
855 if end == 'conf':
856 possible.append(val)
857
858 if possible:
859 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
860
861 def findInheritsClass(self, klass):
862 """
863 Find all recipes which inherit the specified class
864 """
865 pkg_list = []
866
867 for pfn in self.recipecache.pkg_fn:
868 inherits = self.recipecache.inherits.get(pfn, None)
869 if inherits and inherits.count(klass) > 0:
870 pkg_list.append(self.recipecache.pkg_fn[pfn])
871
872 return pkg_list
873
874 def generateTargetsTree(self, klass=None, pkgs=[]):
875 """
876 Generate a dependency tree of buildable targets
877 Generate an event with the result
878 """
879        # if the caller hasn't specified a pkgs list, default to universe
880 if not len(pkgs):
881 pkgs = ['universe']
882        # if a class was passed, ensure all recipes which inherit the
883 # specified class are included in pkgs
884 if klass:
885 extra_pkgs = self.findInheritsClass(klass)
886 pkgs = pkgs + extra_pkgs
887
888 # generate a dependency tree for all our packages
889 tree = self.generatePkgDepTreeData(pkgs, 'build')
890 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
891
892 def buildWorldTargetList(self):
893 """
894 Build package list for "bitbake world"
895 """
896 parselog.debug(1, "collating packages for \"world\"")
897 for f in self.recipecache.possible_world:
898 terminal = True
899 pn = self.recipecache.pkg_fn[f]
900
901 for p in self.recipecache.pn_provides[pn]:
902 if p.startswith('virtual/'):
903 parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
904 terminal = False
905 break
906 for pf in self.recipecache.providers[p]:
907 if self.recipecache.pkg_fn[pf] != pn:
908 parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
909 terminal = False
910 break
911 if terminal:
912 self.recipecache.world_target.add(pn)
913
914 def interactiveMode( self ):
915 """Drop off into a shell"""
916 try:
917 from bb import shell
918 except ImportError:
919 parselog.exception("Interactive mode not available")
920 sys.exit(1)
921 else:
922 shell.start( self )
923
924
925 def handleCollections( self, collections ):
926 """Handle collections"""
927 errors = False
928 self.recipecache.bbfile_config_priorities = []
929 if collections:
930 collection_priorities = {}
931 collection_depends = {}
932 collection_list = collections.split()
933 min_prio = 0
934 for c in collection_list:
935 # Get collection priority if defined explicitly
936 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
937 if priority:
938 try:
939 prio = int(priority)
940 except ValueError:
941 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
942 errors = True
943 if min_prio == 0 or prio < min_prio:
944 min_prio = prio
945 collection_priorities[c] = prio
946 else:
947 collection_priorities[c] = None
948
949 # Check dependencies and store information for priority calculation
950 deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
951 if deps:
952 depnamelist = []
953 deplist = deps.split()
954 for dep in deplist:
955 depsplit = dep.split(':')
956 if len(depsplit) > 1:
957 try:
958 depver = int(depsplit[1])
959 except ValueError:
960 parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
961 errors = True
962 continue
963 else:
964 depver = None
965 dep = depsplit[0]
966 depnamelist.append(dep)
967
968 if dep in collection_list:
969 if depver:
970 layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
971 if layerver:
972 try:
973 lver = int(layerver)
974 except ValueError:
975 parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
976 errors = True
977 continue
978 if lver != depver:
979 parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
980 errors = True
981 else:
982 parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
983 errors = True
984 else:
985 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
986 errors = True
987 collection_depends[c] = depnamelist
988 else:
989 collection_depends[c] = []
990
991 # Recursively work out collection priorities based on dependencies
992 def calc_layer_priority(collection):
993 if not collection_priorities[collection]:
994 max_depprio = min_prio
995 for dep in collection_depends[collection]:
996 calc_layer_priority(dep)
997 depprio = collection_priorities[dep]
998 if depprio > max_depprio:
999 max_depprio = depprio
1000 max_depprio += 1
1001 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1002 collection_priorities[collection] = max_depprio
1003
1004 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1005 for c in collection_list:
1006 calc_layer_priority(c)
1007 regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
1008 if regex == None:
1009 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1010 errors = True
1011 continue
1012 try:
1013 cre = re.compile(regex)
1014 except re.error:
1015 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1016 errors = True
1017 continue
1018 self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1019 if errors:
1020 # We've already printed the actual error(s)
1021 raise CollectionError("Errors during parsing layer configuration")
1022
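
The recursive calc_layer_priority() above assigns every layer that has no explicit BBFILE_PRIORITY a priority one higher than the highest priority found among its dependencies, falling back to the lowest explicit priority. A minimal standalone sketch with hypothetical layer names (no BitBake data store involved):

    def calc_priorities(explicit, depends):
        """explicit: {layer: priority or None}, depends: {layer: [layer, ...]}.
        Mirrors calc_layer_priority() without the BitBake data store."""
        prios = dict(explicit)
        min_prio = min([p for p in explicit.values() if p] or [0])
        def calc(layer):
            if not prios[layer]:
                max_depprio = min_prio
                for dep in depends[layer]:
                    calc(dep)
                    max_depprio = max(max_depprio, prios[dep])
                prios[layer] = max_depprio + 1
        for layer in explicit:
            calc(layer)
        return prios

    # "core" sets BBFILE_PRIORITY explicitly; the other layers are derived.
    print(calc_priorities({"core": 5, "bsp": None, "distro": None},
                          {"core": [], "bsp": ["core"], "distro": ["bsp"]}))
    # {'core': 5, 'bsp': 6, 'distro': 7}
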
1023 def buildSetVars(self):
1024 """
1025 Setup any variables needed before starting a build
1026 """
1027 if not self.data.getVar("BUILDNAME"):
1028 self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
1029 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
1030
1031 def matchFiles(self, bf):
1032 """
1033 Find the .bb files which match the expression in 'buildfile'.
1034 """
1035 if bf.startswith("/") or bf.startswith("../"):
1036 bf = os.path.abspath(bf)
1037
1038 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1039 filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
1040 try:
1041 os.stat(bf)
1042 bf = os.path.abspath(bf)
1043 return [bf]
1044 except OSError:
1045 regexp = re.compile(bf)
1046 matches = []
1047 for f in filelist:
1048 if regexp.search(f) and os.path.isfile(f):
1049 matches.append(f)
1050 return matches
1051
1052 def matchFile(self, buildfile):
1053 """
1054 Find the .bb file which matches the expression in 'buildfile'.
1055        Raise an error if multiple files match.
1056 """
1057 matches = self.matchFiles(buildfile)
1058 if len(matches) != 1:
1059 if matches:
1060 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1061 if matches:
1062 for f in matches:
1063 msg += "\n %s" % f
1064 parselog.error(msg)
1065 else:
1066 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1067 raise NoSpecificMatch
1068 return matches[0]
1069
1070 def buildFile(self, buildfile, task):
1071 """
1072 Build the file matching regexp buildfile
1073 """
1074
1075 # Too many people use -b because they think it's how you normally
1076 # specify a target to be built, so show a warning
1077 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1078
1079 # Parse the configuration here. We need to do it explicitly here since
1080 # buildFile() doesn't use the cache
1081 self.parseConfiguration()
1082
1083 # If we are told to do the None task then query the default task
1084 if (task == None):
1085 task = self.configuration.cmd
1086
1087 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
1088 fn = self.matchFile(fn)
1089
1090 self.buildSetVars()
1091
1092 infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
1093 self.data,
1094 self.caches_array)
1095 infos = dict(infos)
1096
1097 fn = bb.cache.Cache.realfn2virtual(fn, cls)
1098 try:
1099 info_array = infos[fn]
1100 except KeyError:
1101 bb.fatal("%s does not exist" % fn)
1102
1103 if info_array[0].skipped:
1104 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1105
1106 self.recipecache.add_from_recipeinfo(fn, info_array)
1107
1108 # Tweak some variables
1109 item = info_array[0].pn
1110 self.recipecache.ignored_dependencies = set()
1111 self.recipecache.bbfile_priority[fn] = 1
1112
1113 # Remove external dependencies
1114 self.recipecache.task_deps[fn]['depends'] = {}
1115 self.recipecache.deps[fn] = []
1116 self.recipecache.rundeps[fn] = []
1117 self.recipecache.runrecs[fn] = []
1118
1119 # Invalidate task for target if force mode active
1120 if self.configuration.force:
1121 logger.verbose("Invalidate task %s, %s", task, fn)
1122 bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)
1123
1124 # Setup taskdata structure
1125 taskdata = bb.taskdata.TaskData(self.configuration.abort)
1126 taskdata.add_provider(self.data, self.recipecache, item)
1127
1128 buildname = self.data.getVar("BUILDNAME")
1129 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)
1130
1131 # Execute the runqueue
1132 runlist = [[item, "do_%s" % task]]
1133
1134 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
1135
1136 def buildFileIdle(server, rq, abort):
1137
1138 msg = None
1139 if abort or self.state == state.forceshutdown:
1140 rq.finish_runqueue(True)
1141 msg = "Forced shutdown"
1142 elif self.state == state.shutdown:
1143 rq.finish_runqueue(False)
1144 msg = "Stopped build"
1145 failures = 0
1146 try:
1147 retval = rq.execute_runqueue()
1148 except runqueue.TaskFailure as exc:
1149 failures += len(exc.args)
1150 retval = False
1151 except SystemExit as exc:
1152 self.command.finishAsyncCommand()
1153 return False
1154
1155 if not retval:
1156 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
1157 self.command.finishAsyncCommand(msg)
1158 return False
1159 if retval is True:
1160 return True
1161 return retval
1162
1163 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1164
1165 def buildTargets(self, targets, task):
1166 """
1167 Attempt to build the targets specified
1168 """
1169
1170 def buildTargetsIdle(server, rq, abort):
1171 msg = None
1172 if abort or self.state == state.forceshutdown:
1173 rq.finish_runqueue(True)
1174 msg = "Forced shutdown"
1175 elif self.state == state.shutdown:
1176 rq.finish_runqueue(False)
1177 msg = "Stopped build"
1178 failures = 0
1179 try:
1180 retval = rq.execute_runqueue()
1181 except runqueue.TaskFailure as exc:
1182 failures += len(exc.args)
1183 retval = False
1184 except SystemExit as exc:
1185 self.command.finishAsyncCommand()
1186 return False
1187
1188 if not retval:
1189 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
1190 self.command.finishAsyncCommand(msg)
1191 return False
1192 if retval is True:
1193 return True
1194 return retval
1195
1196 self.buildSetVars()
1197
1198 taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
1199
1200 buildname = self.data.getVar("BUILDNAME")
1201 bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)
1202
1203 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
1204 if 'universe' in targets:
1205 rq.rqdata.warn_multi_bb = True
1206
1207 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1208
1209
1210 def getAllKeysWithFlags(self, flaglist):
1211 dump = {}
1212 for k in self.data.keys():
1213 try:
1214 v = self.data.getVar(k, True)
1215 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1216 dump[k] = {
1217 'v' : v ,
1218 'history' : self.data.varhistory.variable(k),
1219 }
1220 for d in flaglist:
1221 dump[k][d] = self.data.getVarFlag(k, d)
1222 except Exception as e:
1223 print(e)
1224 return dump
1225
1226
1227 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1228 '''
1229 Create a new image with a "require"/"inherit" base_image statement
1230 '''
1231 import re
1232 if timestamp:
1233 image_name = os.path.splitext(image)[0]
1234 timestr = time.strftime("-%Y%m%d-%H%M%S")
1235 dest = image_name + str(timestr) + ".bb"
1236 else:
1237 if not image.endswith(".bb"):
1238 dest = image + ".bb"
1239 else:
1240 dest = image
1241
1242 basename = False
1243 if base_image:
1244 with open(base_image, 'r') as f:
1245 require_line = f.readline()
1246 p = re.compile("IMAGE_BASENAME *=")
1247 for line in f:
1248 if p.search(line):
1249 basename = True
1250
1251 with open(dest, "w") as imagefile:
1252 if base_image is None:
1253 imagefile.write("inherit core-image\n")
1254 else:
1255 topdir = self.data.getVar("TOPDIR")
1256 if topdir in base_image:
1257 base_image = require_line.split()[1]
1258 imagefile.write("require " + base_image + "\n")
1259 image_install = "IMAGE_INSTALL = \""
1260 for package in package_queue:
1261 image_install += str(package) + " "
1262 image_install += "\"\n"
1263 imagefile.write(image_install)
1264
1265 description_var = "DESCRIPTION = \"" + description + "\"\n"
1266 imagefile.write(description_var)
1267
1268 if basename:
1269             # If this is overwritten in an inherited image, reset it to default
1270 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1271 imagefile.write(image_basename)
1272
1273 self.state = state.initial
1274 if timestamp:
1275 return timestr
1276
1277 # This is called for all async commands when self.state != running
1278 def updateCache(self):
1279 if self.state == state.running:
1280 return
1281
1282 if self.state in (state.shutdown, state.forceshutdown):
1283 if hasattr(self.parser, 'shutdown'):
1284 self.parser.shutdown(clean=False, force = True)
1285 raise bb.BBHandledException()
1286
1287 if self.state != state.parsing:
1288 self.parseConfiguration ()
1289 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1290 bb.event.fire(bb.event.SanityCheck(False), self.data)
1291
1292 ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
1293 self.recipecache.ignored_dependencies = set(ignore.split())
1294
1295 for dep in self.configuration.extra_assume_provided:
1296 self.recipecache.ignored_dependencies.add(dep)
1297
1298 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1299 (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)
1300
1301 self.data.renameVar("__depends", "__base_depends")
1302
1303 self.parser = CookerParser(self, filelist, masked)
1304 self.state = state.parsing
1305
1306 if not self.parser.parse_next():
1307 collectlog.debug(1, "parsing complete")
1308 if self.parser.error:
1309 raise bb.BBHandledException()
1310 self.show_appends_with_no_recipes()
1311 self.handlePrefProviders()
1312 self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
1313 self.state = state.running
1314 return None
1315
1316 return True
1317
1318 def checkPackages(self, pkgs_to_build):
1319
1320 # Return a copy, don't modify the original
1321 pkgs_to_build = pkgs_to_build[:]
1322
1323 if len(pkgs_to_build) == 0:
1324 raise NothingToBuild
1325
1326 ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
1327 for pkg in pkgs_to_build:
1328 if pkg in ignore:
1329 parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1330
1331 if 'world' in pkgs_to_build:
1332 self.buildWorldTargetList()
1333 pkgs_to_build.remove('world')
1334 for t in self.recipecache.world_target:
1335 pkgs_to_build.append(t)
1336
1337 if 'universe' in pkgs_to_build:
1338 parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
1339 parselog.debug(1, "collating packages for \"universe\"")
1340 pkgs_to_build.remove('universe')
1341 for t in self.recipecache.universe_target:
1342 pkgs_to_build.append(t)
1343
1344 return pkgs_to_build
1345
1346
1347
1348
1349 def pre_serve(self):
1350 # Empty the environment. The environment will be populated as
1351 # necessary from the data store.
1352 #bb.utils.empty_environment()
1353 try:
1354 self.prhost = prserv.serv.auto_start(self.data)
1355 except prserv.serv.PRServiceConfigError:
1356 bb.event.fire(CookerExit(), self.event_data)
1357 self.state = state.error
1358 return
1359
1360 def post_serve(self):
1361 prserv.serv.auto_shutdown(self.data)
1362 bb.event.fire(CookerExit(), self.event_data)
1363
1364 def shutdown(self, force = False):
1365 if force:
1366 self.state = state.forceshutdown
1367 else:
1368 self.state = state.shutdown
1369
1370 def finishcommand(self):
1371 self.state = state.initial
1372
1373 def reset(self):
1374 self.initConfigurationData()
1375
1376def server_main(cooker, func, *args):
1377 cooker.pre_serve()
1378
1379 if cooker.configuration.profile:
1380 try:
1381 import cProfile as profile
1382 except:
1383 import profile
1384 prof = profile.Profile()
1385
1386 ret = profile.Profile.runcall(prof, func, *args)
1387
1388 prof.dump_stats("profile.log")
1389 bb.utils.process_profilelog("profile.log")
1390 print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
1391
1392 else:
1393 ret = func(*args)
1394
1395 cooker.post_serve()
1396
1397 return ret
1398
1399class CookerExit(bb.event.Event):
1400 """
1401 Notify clients of the Cooker shutdown
1402 """
1403
1404 def __init__(self):
1405 bb.event.Event.__init__(self)
1406
1407
1408class CookerCollectFiles(object):
1409 def __init__(self, priorities):
1410 self.appendlist = {}
1411 self.appliedappendlist = []
1412 self.bbfile_config_priorities = priorities
1413
1414 def calc_bbfile_priority( self, filename, matched = None ):
1415 for _, _, regex, pri in self.bbfile_config_priorities:
1416 if regex.match(filename):
1417 if matched != None:
1418 if not regex in matched:
1419 matched.add(regex)
1420 return pri
1421 return 0
1422
1423 def get_bbfiles(self):
1424 """Get list of default .bb files by reading out the current directory"""
1425 path = os.getcwd()
1426 contents = os.listdir(path)
1427 bbfiles = []
1428 for f in contents:
1429 if f.endswith(".bb"):
1430 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1431 return bbfiles
1432
1433 def find_bbfiles(self, path):
1434 """Find all the .bb and .bbappend files in a directory"""
1435 found = []
1436 for dir, dirs, files in os.walk(path):
1437 for ignored in ('SCCS', 'CVS', '.svn'):
1438 if ignored in dirs:
1439 dirs.remove(ignored)
1440 found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]
1441
1442 return found
1443
1444 def collect_bbfiles(self, config, eventdata):
1445 """Collect all available .bb build files"""
1446 masked = 0
1447
1448 collectlog.debug(1, "collecting .bb files")
1449
1450 files = (config.getVar( "BBFILES", True) or "").split()
1451 config.setVar("BBFILES", " ".join(files))
1452
1453 # Sort files by priority
1454 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1455
1456 if not len(files):
1457 files = self.get_bbfiles()
1458
1459 if not len(files):
1460 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1461 bb.event.fire(CookerExit(), eventdata)
1462
1463 # Can't use set here as order is important
1464 newfiles = []
1465 for f in files:
1466 if os.path.isdir(f):
1467 dirfiles = self.find_bbfiles(f)
1468 for g in dirfiles:
1469 if g not in newfiles:
1470 newfiles.append(g)
1471 else:
1472 globbed = glob.glob(f)
1473 if not globbed and os.path.exists(f):
1474 globbed = [f]
1475 for g in globbed:
1476 if g not in newfiles:
1477 newfiles.append(g)
1478
1479 bbmask = config.getVar('BBMASK', True)
1480
1481 if bbmask:
1482 try:
1483 bbmask_compiled = re.compile(bbmask)
1484 except sre_constants.error:
1485 collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
1486 return list(newfiles), 0
1487
1488 bbfiles = []
1489 bbappend = []
1490 for f in newfiles:
1491 if bbmask and bbmask_compiled.search(f):
1492 collectlog.debug(1, "skipping masked file %s", f)
1493 masked += 1
1494 continue
1495 if f.endswith('.bb'):
1496 bbfiles.append(f)
1497 elif f.endswith('.bbappend'):
1498 bbappend.append(f)
1499 else:
1500 collectlog.debug(1, "skipping %s: unknown file extension", f)
1501
1502 # Build a list of .bbappend files for each .bb file
1503 for f in bbappend:
1504 base = os.path.basename(f).replace('.bbappend', '.bb')
1505 if not base in self.appendlist:
1506 self.appendlist[base] = []
1507 if f not in self.appendlist[base]:
1508 self.appendlist[base].append(f)
1509
1510 # Find overlayed recipes
1511 # bbfiles will be in priority order which makes this easy
1512 bbfile_seen = dict()
1513 self.overlayed = defaultdict(list)
1514 for f in reversed(bbfiles):
1515 base = os.path.basename(f)
1516 if base not in bbfile_seen:
1517 bbfile_seen[base] = f
1518 else:
1519 topfile = bbfile_seen[base]
1520 self.overlayed[topfile].append(f)
1521
1522 return (bbfiles, masked)
1523
1524 def get_file_appends(self, fn):
1525 """
1526 Returns a list of .bbappend files to apply to fn
1527 """
1528 filelist = []
1529 f = os.path.basename(fn)
1530 for bbappend in self.appendlist:
1531 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1532 self.appliedappendlist.append(bbappend)
1533 for filename in self.appendlist[bbappend]:
1534 filelist.append(filename)
1535 return filelist
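
get_file_appends() above supports a '%' wildcard in .bbappend names: an append whose name (with .bbappend already rewritten to .bb, as in the appendlist keys) shares the prefix before the '%' with the recipe's basename is applied. A small sketch with hypothetical file names:

    import os

    def append_applies(recipe_fn, bbappend):
        """bbappend is an appendlist key, i.e. the .bbappend name with its
        suffix rewritten to .bb (see collect_bbfiles above)."""
        f = os.path.basename(recipe_fn)
        if bbappend == f:
            return True
        return '%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])

    print(append_applies("meta/recipes-core/foo/foo_1.2.bb", "foo_%.bb"))  # True
    print(append_applies("meta/recipes-core/foo/foo_1.2.bb", "bar_%.bb"))  # False
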
1536
1537 def collection_priorities(self, pkgfns):
1538
1539 priorities = {}
1540
1541 # Calculate priorities for each file
1542 matched = set()
1543 for p in pkgfns:
1544 realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
1545 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1546
1547 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1548 unmatched = set()
1549 for _, _, regex, pri in self.bbfile_config_priorities:
1550 if not regex in matched:
1551 unmatched.add(regex)
1552
1553 def findmatch(regex):
1554 for bbfile in self.appendlist:
1555 for append in self.appendlist[bbfile]:
1556 if regex.match(append):
1557 return True
1558 return False
1559
1560 for unmatch in unmatched.copy():
1561 if findmatch(unmatch):
1562 unmatched.remove(unmatch)
1563
1564 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1565 if regex in unmatched:
1566 collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
1567
1568 return priorities
1569
1570class ParsingFailure(Exception):
1571 def __init__(self, realexception, recipe):
1572 self.realexception = realexception
1573 self.recipe = recipe
1574 Exception.__init__(self, realexception, recipe)
1575
1576class Feeder(multiprocessing.Process):
1577 def __init__(self, jobs, to_parsers, quit):
1578 self.quit = quit
1579 self.jobs = jobs
1580 self.to_parsers = to_parsers
1581 multiprocessing.Process.__init__(self)
1582
1583 def run(self):
1584 while True:
1585 try:
1586 quit = self.quit.get_nowait()
1587 except Queue.Empty:
1588 pass
1589 else:
1590 if quit == 'cancel':
1591 self.to_parsers.cancel_join_thread()
1592 break
1593
1594 try:
1595 job = self.jobs.pop()
1596 except IndexError:
1597 break
1598
1599 try:
1600 self.to_parsers.put(job, timeout=0.5)
1601 except Queue.Full:
1602 self.jobs.insert(0, job)
1603 continue
1604
1605class Parser(multiprocessing.Process):
1606 def __init__(self, jobs, results, quit, init, profile):
1607 self.jobs = jobs
1608 self.results = results
1609 self.quit = quit
1610 self.init = init
1611 multiprocessing.Process.__init__(self)
1612 self.context = bb.utils.get_context().copy()
1613 self.handlers = bb.event.get_class_handlers().copy()
1614 self.profile = profile
1615
1616 def run(self):
1617
1618 if not self.profile:
1619 self.realrun()
1620 return
1621
1622 try:
1623 import cProfile as profile
1624 except:
1625 import profile
1626 prof = profile.Profile()
1627 try:
1628 profile.Profile.runcall(prof, self.realrun)
1629 finally:
1630 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1631 prof.dump_stats(logfile)
1632 bb.utils.process_profilelog(logfile)
1633 print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))
1634
1635 def realrun(self):
1636 if self.init:
1637 self.init()
1638
1639 pending = []
1640 while True:
1641 try:
1642 self.quit.get_nowait()
1643 except Queue.Empty:
1644 pass
1645 else:
1646 self.results.cancel_join_thread()
1647 break
1648
1649 if pending:
1650 result = pending.pop()
1651 else:
1652 try:
1653 job = self.jobs.get(timeout=0.25)
1654 except Queue.Empty:
1655 continue
1656
1657 if job is None:
1658 break
1659 result = self.parse(*job)
1660
1661 try:
1662 self.results.put(result, timeout=0.25)
1663 except Queue.Full:
1664 pending.append(result)
1665
1666 def parse(self, filename, appends, caches_array):
1667 try:
1668 # Reset our environment and handlers to the original settings
1669 bb.utils.set_context(self.context.copy())
1670 bb.event.set_class_handlers(self.handlers.copy())
1671 return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
1672 except Exception as exc:
1673 tb = sys.exc_info()[2]
1674 exc.recipe = filename
1675 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1676 return True, exc
1677 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
1678 # and for example a worker thread doesn't just exit on its own in response to
1679 # a SystemExit event for example.
1680 except BaseException as exc:
1681 return True, ParsingFailure(exc, filename)
1682
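
Feeder and Parser above form a small multi-process pipeline: jobs flow through a bounded queue to worker processes, results come back on another queue, and None (or a message on the quit queue) acts as the shutdown signal. A stripped-down sketch of the same pattern, squaring numbers instead of parsing recipes:

    import multiprocessing

    def worker(jobs, results):
        # Pull jobs until the None sentinel arrives, as Parser.realrun() does.
        while True:
            job = jobs.get()
            if job is None:
                break
            results.put(job * job)

    if __name__ == "__main__":
        jobs = multiprocessing.Queue()
        results = multiprocessing.Queue()
        procs = [multiprocessing.Process(target=worker, args=(jobs, results))
                 for _ in range(2)]
        for p in procs:
            p.start()
        for n in range(10):
            jobs.put(n)
        for _ in procs:            # one sentinel per worker, as in shutdown()
            jobs.put(None)
        print(sorted(results.get() for _ in range(10)))
        for p in procs:
            p.join()
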
1683class CookerParser(object):
1684 def __init__(self, cooker, filelist, masked):
1685 self.filelist = filelist
1686 self.cooker = cooker
1687 self.cfgdata = cooker.data
1688 self.cfghash = cooker.data_hash
1689
1690 # Accounting statistics
1691 self.parsed = 0
1692 self.cached = 0
1693 self.error = 0
1694 self.masked = masked
1695
1696 self.skipped = 0
1697 self.virtuals = 0
1698 self.total = len(filelist)
1699
1700 self.current = 0
1701 self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
1702 multiprocessing.cpu_count())
1703
1704 self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
1705 self.fromcache = []
1706 self.willparse = []
1707 for filename in self.filelist:
1708 appends = self.cooker.collection.get_file_appends(filename)
1709 if not self.bb_cache.cacheValid(filename, appends):
1710 self.willparse.append((filename, appends, cooker.caches_array))
1711 else:
1712 self.fromcache.append((filename, appends))
1713 self.toparse = self.total - len(self.fromcache)
1714 self.progress_chunk = max(self.toparse / 100, 1)
1715
1716 self.start()
1717 self.haveshutdown = False
1718
1719 def start(self):
1720 self.results = self.load_cached()
1721 self.processes = []
1722 if self.toparse:
1723 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
1724 def init():
1725 Parser.cfg = self.cfgdata
1726 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
1727 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)
1728
1729 self.feeder_quit = multiprocessing.Queue(maxsize=1)
1730 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
1731 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
1732 self.result_queue = multiprocessing.Queue()
1733 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
1734 self.feeder.start()
1735 for i in range(0, self.num_processes):
1736 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
1737 parser.start()
1738 self.processes.append(parser)
1739
1740 self.results = itertools.chain(self.results, self.parse_generator())
1741
1742 def shutdown(self, clean=True, force=False):
1743 if not self.toparse:
1744 return
1745 if self.haveshutdown:
1746 return
1747 self.haveshutdown = True
1748
1749 if clean:
1750 event = bb.event.ParseCompleted(self.cached, self.parsed,
1751 self.skipped, self.masked,
1752 self.virtuals, self.error,
1753 self.total)
1754
1755 bb.event.fire(event, self.cfgdata)
1756 self.feeder_quit.put(None)
1757 for process in self.processes:
1758 self.jobs.put(None)
1759 else:
1760 self.feeder_quit.put('cancel')
1761
1762 self.parser_quit.cancel_join_thread()
1763 for process in self.processes:
1764 self.parser_quit.put(None)
1765
1766 self.jobs.cancel_join_thread()
1767
1768 for process in self.processes:
1769 if force:
1770 process.join(.1)
1771 process.terminate()
1772 else:
1773 process.join()
1774 self.feeder.join()
1775
1776 sync = threading.Thread(target=self.bb_cache.sync)
1777 sync.start()
1778 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
1779 bb.codeparser.parser_cache_savemerge(self.cooker.data)
1780 bb.fetch.fetcher_parse_done(self.cooker.data)
1781
1782 def load_cached(self):
1783 for filename, appends in self.fromcache:
1784 cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
1785 yield not cached, infos
1786
1787 def parse_generator(self):
1788 while True:
1789 if self.parsed >= self.toparse:
1790 break
1791
1792 try:
1793 result = self.result_queue.get(timeout=0.25)
1794 except Queue.Empty:
1795 pass
1796 else:
1797 value = result[1]
1798 if isinstance(value, BaseException):
1799 raise value
1800 else:
1801 yield result
1802
1803 def parse_next(self):
1804 result = []
1805 parsed = None
1806 try:
1807 parsed, result = self.results.next()
1808 except StopIteration:
1809 self.shutdown()
1810 return False
1811 except bb.BBHandledException as exc:
1812 self.error += 1
1813 logger.error('Failed to parse recipe: %s' % exc.recipe)
1814 self.shutdown(clean=False)
1815 return False
1816 except ParsingFailure as exc:
1817 self.error += 1
1818 logger.error('Unable to parse %s: %s' %
1819 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
1820 self.shutdown(clean=False)
1821 return False
1822 except bb.parse.ParseError as exc:
1823 self.error += 1
1824 logger.error(str(exc))
1825 self.shutdown(clean=False)
1826 return False
1827 except bb.data_smart.ExpansionError as exc:
1828 self.error += 1
1829 _, value, _ = sys.exc_info()
1830 logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
1831 self.shutdown(clean=False)
1832 return False
1833 except SyntaxError as exc:
1834 self.error += 1
1835 logger.error('Unable to parse %s', exc.recipe)
1836 self.shutdown(clean=False)
1837 return False
1838 except Exception as exc:
1839 self.error += 1
1840 etype, value, tb = sys.exc_info()
1841 if hasattr(value, "recipe"):
1842 logger.error('Unable to parse %s', value.recipe,
1843 exc_info=(etype, value, exc.traceback))
1844 else:
1845 # Most likely, an exception occurred during raising an exception
1846 import traceback
1847 logger.error('Exception during parse: %s' % traceback.format_exc())
1848 self.shutdown(clean=False)
1849 return False
1850
1851 self.current += 1
1852 self.virtuals += len(result)
1853 if parsed:
1854 self.parsed += 1
1855 if self.parsed % self.progress_chunk == 0:
1856 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
1857 self.cfgdata)
1858 else:
1859 self.cached += 1
1860
1861 for virtualfn, info_array in result:
1862 if info_array[0].skipped:
1863 self.skipped += 1
1864 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
1865 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
1866 parsed=parsed)
1867 return True
1868
1869 def reparse(self, filename):
1870 infos = self.bb_cache.parse(filename,
1871 self.cooker.collection.get_file_appends(filename),
1872 self.cfgdata, self.cooker.caches_array)
1873 for vfn, info_array in infos:
1874 self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
new file mode 100644
index 0000000000..b9b9e16675
--- /dev/null
+++ b/bitbake/lib/bb/cookerdata.py
@@ -0,0 +1,305 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import os, sys
26from functools import wraps
27import logging
28import bb
29from bb import data
30import bb.parse
31
32logger = logging.getLogger("BitBake")
33parselog = logging.getLogger("BitBake.Parsing")
34
35class ConfigParameters(object):
36 def __init__(self):
37 self.options, targets = self.parseCommandLine()
38 self.environment = self.parseEnvironment()
39
40 self.options.pkgs_to_build = targets or []
41
42 self.options.tracking = False
43 if hasattr(self.options, "show_environment") and self.options.show_environment:
44 self.options.tracking = True
45
46 for key, val in self.options.__dict__.items():
47 setattr(self, key, val)
48
49 def parseCommandLine(self):
50 raise Exception("Caller must implement commandline option parsing")
51
52 def parseEnvironment(self):
53 return os.environ.copy()
54
55 def updateFromServer(self, server):
56 if not self.options.cmd:
57 defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"])
58 if error:
59 raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error)
60 self.options.cmd = defaulttask or "build"
61 _, error = server.runCommand(["setConfig", "cmd", self.options.cmd])
62 if error:
63 raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
64
65 if not self.options.pkgs_to_build:
66 bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
67 if error:
68 raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
69 if bbpkgs:
70 self.options.pkgs_to_build.extend(bbpkgs.split())
71
72 def parseActions(self):
73 # Parse any commandline into actions
74 action = {'action':None, 'msg':None}
75 if self.options.show_environment:
76 if 'world' in self.options.pkgs_to_build:
77 action['msg'] = "'world' is not a valid target for --environment."
78 elif 'universe' in self.options.pkgs_to_build:
79 action['msg'] = "'universe' is not a valid target for --environment."
80 elif len(self.options.pkgs_to_build) > 1:
81 action['msg'] = "Only one target can be used with the --environment option."
82 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
83 action['msg'] = "No target should be used with the --environment and --buildfile options."
84 elif len(self.options.pkgs_to_build) > 0:
85 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
86 else:
87 action['action'] = ["showEnvironment", self.options.buildfile]
88 elif self.options.buildfile is not None:
89 action['action'] = ["buildFile", self.options.buildfile, self.options.cmd]
90 elif self.options.revisions_changed:
91 action['action'] = ["compareRevisions"]
92 elif self.options.show_versions:
93 action['action'] = ["showVersions"]
94 elif self.options.parse_only:
95 action['action'] = ["parseFiles"]
96 elif self.options.dot_graph:
97 if self.options.pkgs_to_build:
98 action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd]
99 else:
100 action['msg'] = "Please specify a package name for dependency graph generation."
101 else:
102 if self.options.pkgs_to_build:
103 action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd]
104 else:
105 #action['msg'] = "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information."
106 action = None
107 self.options.initialaction = action
108 return action
109
110class CookerConfiguration(object):
111 """
112 Manages build options and configurations for one run
113 """
114
115 def __init__(self):
116 self.debug_domains = []
117 self.extra_assume_provided = []
118 self.prefile = []
119 self.postfile = []
120 self.debug = 0
121 self.cmd = None
122 self.abort = True
123 self.force = False
124 self.profile = False
125 self.nosetscene = False
126 self.invalidate_stamp = False
127 self.dump_signatures = []
128 self.dry_run = False
129 self.tracking = False
130 self.interface = []
131
132 self.env = {}
133
134 def setConfigParameters(self, parameters):
135 for key in self.__dict__.keys():
136 if key in parameters.options.__dict__:
137 setattr(self, key, parameters.options.__dict__[key])
138 self.env = parameters.environment.copy()
139 self.tracking = parameters.tracking
140
141 def setServerRegIdleCallback(self, srcb):
142 self.server_register_idlecallback = srcb
143
144 def __getstate__(self):
145 state = {}
146 for key in self.__dict__.keys():
147 if key == "server_register_idlecallback":
148 state[key] = None
149 else:
150 state[key] = getattr(self, key)
151 return state
152
153 def __setstate__(self,state):
154 for k in state:
155 setattr(self, k, state[k])
156
157
158def catch_parse_error(func):
159 """Exception handling bits for our parsing"""
160 @wraps(func)
161 def wrapped(fn, *args):
162 try:
163 return func(fn, *args)
164 except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
165 import traceback
166 parselog.critical( traceback.format_exc())
167 parselog.critical("Unable to parse %s: %s" % (fn, exc))
168 sys.exit(1)
169 return wrapped
170
171@catch_parse_error
172def parse_config_file(fn, data, include=True):
173 return bb.parse.handle(fn, data, include)
174
175@catch_parse_error
176def _inherit(bbclass, data):
177 bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
178 return data
179
180def findConfigFile(configfile, data):
181 search = []
182 bbpath = data.getVar("BBPATH", True)
183 if bbpath:
184 for i in bbpath.split(":"):
185 search.append(os.path.join(i, "conf", configfile))
186 path = os.getcwd()
187 while path != "/":
188 search.append(os.path.join(path, "conf", configfile))
189 path, _ = os.path.split(path)
190
191 for i in search:
192 if os.path.exists(i):
193 return i
194
195 return None
196
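
findConfigFile() above looks in the conf/ directory of every BBPATH entry first, then in conf/ directories while walking upwards from the current directory to /. A hedged standalone sketch of the same search order (BBPATH passed as a plain string, paths invented):

    import os

    def find_config_file(configfile, bbpath, cwd):
        # Candidates from BBPATH, then from each parent directory of cwd.
        search = [os.path.join(i, "conf", configfile) for i in bbpath.split(":") if i]
        path = cwd
        while path != "/":
            search.append(os.path.join(path, "conf", configfile))
            path, _ = os.path.split(path)
        for candidate in search:
            if os.path.exists(candidate):
                return candidate
        return None

    # Hypothetical build rooted in /srv/build with one extra layer on BBPATH.
    print(find_config_file("bblayers.conf", "/srv/build:/srv/poky/meta", "/srv/build"))
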
197class CookerDataBuilder(object):
198
199 def __init__(self, cookercfg, worker = False):
200
201 self.prefiles = cookercfg.prefile
202 self.postfiles = cookercfg.postfile
203 self.tracking = cookercfg.tracking
204
205 bb.utils.set_context(bb.utils.clean_context())
206 bb.event.set_class_handlers(bb.event.clean_class_handlers())
207 self.data = bb.data.init()
208 if self.tracking:
209 self.data.enableTracking()
210
211 # Keep a datastore of the initial environment variables and their
212 # values from when BitBake was launched to enable child processes
213 # to use environment variables which have been cleaned from the
214 # BitBake processes env
215 self.savedenv = bb.data.init()
216 for k in cookercfg.env:
217 self.savedenv.setVar(k, cookercfg.env[k])
218
219 filtered_keys = bb.utils.approved_variables()
220 bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
221 self.data.setVar("BB_ORIGENV", self.savedenv)
222
223 if worker:
224 self.data.setVar("BB_WORKERCONTEXT", "1")
225
226 def parseBaseConfiguration(self):
227 try:
228 self.parseConfigurationFiles(self.prefiles, self.postfiles)
229 except SyntaxError:
230 sys.exit(1)
231 except Exception:
232 logger.exception("Error parsing configuration files")
233 sys.exit(1)
234
235 def _findLayerConf(self, data):
236 return findConfigFile("bblayers.conf", data)
237
238 def parseConfigurationFiles(self, prefiles, postfiles):
239 data = self.data
240 bb.parse.init_parser(data)
241
242 # Parse files for loading *before* bitbake.conf and any includes
243 for f in prefiles:
244 data = parse_config_file(f, data)
245
246 layerconf = self._findLayerConf(data)
247 if layerconf:
248 parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
249 # By definition bblayers.conf is in conf/ of TOPDIR.
250 # We may have been called with cwd somewhere else so reset TOPDIR
251 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
252 data = parse_config_file(layerconf, data)
253
254 layers = (data.getVar('BBLAYERS', True) or "").split()
255
256 data = bb.data.createCopy(data)
257 for layer in layers:
258 parselog.debug(2, "Adding layer %s", layer)
259 data.setVar('LAYERDIR', layer)
260 data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
261 data.expandVarref('LAYERDIR')
262
263 data.delVar('LAYERDIR')
264
265 if not data.getVar("BBPATH", True):
266 msg = "The BBPATH variable is not set"
267 if not layerconf:
268 msg += (" and bitbake did not find a conf/bblayers.conf file in"
269 " the expected location.\nMaybe you accidentally"
270 " invoked bitbake from the wrong directory?")
271 raise SystemExit(msg)
272
273 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
274
275 # Parse files for loading *after* bitbake.conf and any includes
276 for p in postfiles:
277 data = parse_config_file(p, data)
278
279 # Handle any INHERITs and inherit the base class
280 bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
281 for bbclass in bbclasses:
282 data = _inherit(bbclass, data)
283
284        # Normally we only register event handlers at the end of parsing .bb files
285 # We register any handlers we've found so far here...
286 for var in data.getVar('__BBHANDLERS') or []:
287 bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())
288
289 if data.getVar("BB_WORKERCONTEXT", False) is None:
290 bb.fetch.fetcher_init(data)
291 bb.codeparser.parser_cache_init(data)
292 bb.event.fire(bb.event.ConfigParsed(), data)
293
294 if data.getVar("BB_INVALIDCONF") is True:
295 data.setVar("BB_INVALIDCONF", False)
296 self.parseConfigurationFiles(self.prefiles, self.postfiles)
297 return
298
299 bb.parse.init_parser(data)
300 data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
301 self.data = data
302 self.data_hash = data.get_hash()
303
304
305
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
new file mode 100644
index 0000000000..f0714b3af6
--- /dev/null
+++ b/bitbake/lib/bb/daemonize.py
@@ -0,0 +1,190 @@
1"""
2Python Daemonizing helper
3
4Configurable daemon behaviors:
5
6   1.) The current working directory is set to the "/" directory.
7   2.) The current file creation mode mask is set to 0.
8 3.) Close all open files (1024).
9 4.) Redirect standard I/O streams to "/dev/null".
10
11A failed call to fork() now raises an exception.
12
13References:
14 1) Advanced Programming in the Unix Environment: W. Richard Stevens
15 2) Unix Programming Frequently Asked Questions:
16 http://www.erlenstar.demon.co.uk/unix/faq_toc.html
17
18Modified to allow a function to be daemonized and return for
19bitbake use by Richard Purdie
20"""
21
22__author__ = "Chad J. Schroeder"
23__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
24__version__ = "0.2"
25
26# Standard Python modules.
27import os # Miscellaneous OS interfaces.
28import sys # System-specific parameters and functions.
29
30# Default daemon parameters.
31# File mode creation mask of the daemon.
32# For BitBake's children, we do want to inherit the parent umask.
33UMASK = None
34
35# Default maximum for the number of available file descriptors.
36MAXFD = 1024
37
38# The standard I/O file descriptors are redirected to /dev/null by default.
39if (hasattr(os, "devnull")):
40 REDIRECT_TO = os.devnull
41else:
42 REDIRECT_TO = "/dev/null"
43
44def createDaemon(function, logfile):
45 """
46 Detach a process from the controlling terminal and run it in the
47 background as a daemon, returning control to the caller.
48 """
49
50 try:
51 # Fork a child process so the parent can exit. This returns control to
52 # the command-line or shell. It also guarantees that the child will not
53 # be a process group leader, since the child receives a new process ID
54 # and inherits the parent's process group ID. This step is required
55        # to ensure that the next call to os.setsid is successful.
56 pid = os.fork()
57 except OSError as e:
58 raise Exception("%s [%d]" % (e.strerror, e.errno))
59
60 if (pid == 0): # The first child.
61 # To become the session leader of this new session and the process group
62 # leader of the new process group, we call os.setsid(). The process is
63 # also guaranteed not to have a controlling terminal.
64 os.setsid()
65
66 # Is ignoring SIGHUP necessary?
67 #
68 # It's often suggested that the SIGHUP signal should be ignored before
69 # the second fork to avoid premature termination of the process. The
70 # reason is that when the first child terminates, all processes, e.g.
71 # the second child, in the orphaned group will be sent a SIGHUP.
72 #
73 # "However, as part of the session management system, there are exactly
74 # two cases where SIGHUP is sent on the death of a process:
75 #
76 # 1) When the process that dies is the session leader of a session that
77 # is attached to a terminal device, SIGHUP is sent to all processes
78 # in the foreground process group of that terminal device.
79 # 2) When the death of a process causes a process group to become
80 # orphaned, and one or more processes in the orphaned group are
81 # stopped, then SIGHUP and SIGCONT are sent to all members of the
82 # orphaned group." [2]
83 #
84 # The first case can be ignored since the child is guaranteed not to have
85 # a controlling terminal. The second case isn't so easy to dismiss.
86 # The process group is orphaned when the first child terminates and
87 # POSIX.1 requires that every STOPPED process in an orphaned process
88 # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
89 # second child is not STOPPED though, we can safely forego ignoring the
90 # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
91 #
92 # import signal # Set handlers for asynchronous events.
93 # signal.signal(signal.SIGHUP, signal.SIG_IGN)
94
95 try:
96 # Fork a second child and exit immediately to prevent zombies. This
97 # causes the second child process to be orphaned, making the init
98 # process responsible for its cleanup. And, since the first child is
99 # a session leader without a controlling terminal, it's possible for
100 # it to acquire one by opening a terminal in the future (System V-
101 # based systems). This second fork guarantees that the child is no
102 # longer a session leader, preventing the daemon from ever acquiring
103 # a controlling terminal.
104 pid = os.fork() # Fork a second child.
105 except OSError as e:
106 raise Exception("%s [%d]" % (e.strerror, e.errno))
107
108 if (pid == 0): # The second child.
109 # We probably don't want the file mode creation mask inherited from
110 # the parent, so we give the child complete control over permissions.
111 if UMASK is not None:
112 os.umask(UMASK)
113 else:
114 # Parent (the first child) of the second child.
115 os._exit(0)
116 else:
117 # exit() or _exit()?
118 # _exit is like exit(), but it doesn't call any functions registered
119 # with atexit (and on_exit) or any registered signal handlers. It also
120 # closes any open file descriptors. Using exit() may cause all stdio
121 # streams to be flushed twice and any temporary files may be unexpectedly
122 # removed. It's therefore recommended that child branches of a fork()
123 # and the parent branch(es) of a daemon use _exit().
124 return
125
126 # Close all open file descriptors. This prevents the child from keeping
127 # open any file descriptors inherited from the parent. There is a variety
128 # of methods to accomplish this task. Three are listed below.
129 #
130 # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
131    # number of open file descriptors to close. If it doesn't exist, use
132 # the default value (configurable).
133 #
134 # try:
135 # maxfd = os.sysconf("SC_OPEN_MAX")
136 # except (AttributeError, ValueError):
137 # maxfd = MAXFD
138 #
139 # OR
140 #
141 # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
142 # maxfd = os.sysconf("SC_OPEN_MAX")
143 # else:
144 # maxfd = MAXFD
145 #
146 # OR
147 #
148 # Use the getrlimit method to retrieve the maximum file descriptor number
149    # that can be opened by this process. If there is no limit on the
150 # resource, use the default value.
151 #
152 import resource # Resource usage information.
153 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
154 if (maxfd == resource.RLIM_INFINITY):
155 maxfd = MAXFD
156
157 # Iterate through and close all file descriptors.
158# for fd in range(0, maxfd):
159# try:
160# os.close(fd)
161# except OSError: # ERROR, fd wasn't open to begin with (ignored)
162# pass
163
164 # Redirect the standard I/O file descriptors to the specified file. Since
165 # the daemon has no controlling terminal, most daemons redirect stdin,
166 # stdout, and stderr to /dev/null. This is done to prevent side-effects
167 # from reads and writes to the standard I/O file descriptors.
168
169 # This call to open is guaranteed to return the lowest file descriptor,
170 # which will be 0 (stdin), since it was closed above.
171# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
172
173 # Duplicate standard input to standard output and standard error.
174# os.dup2(0, 1) # standard output (1)
175# os.dup2(0, 2) # standard error (2)
176
177
178 si = file('/dev/null', 'r')
179 so = file(logfile, 'w')
180 se = so
181
182
183 # Replace those fds with our own
184 os.dup2(si.fileno(), sys.stdin.fileno())
185 os.dup2(so.fileno(), sys.stdout.fileno())
186 os.dup2(se.fileno(), sys.stderr.fileno())
187
188 function()
189
190 os._exit(0)
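
# Editor's note: a hypothetical usage sketch (not part of the commit). The
# caller returns immediately while serve() runs in the detached grandchild
# with stdout/stderr redirected to the given logfile; the function name
# "serve" and the logfile path are illustrative only.
def serve():
    # Long-running work; any print output ends up in the logfile.
    print("daemon running as pid %d" % os.getpid())

createDaemon(serve, "/tmp/bitbake-daemon.log")
# Control returns here in the original process almost immediately.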
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
new file mode 100644
index 0000000000..db938be1e6
--- /dev/null
+++ b/bitbake/lib/bb/data.py
@@ -0,0 +1,403 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Data' implementations
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9The expandData and update_data are the most expensive
10operations. At night the cookie monster came by and
11suggested 'give me cookies on setting the variables and
12things will work out'. Taking this suggestion into account
13applying the skills from the not yet passed 'Entwurf und
14Analyse von Algorithmen' lecture and the cookie
15monster seems to be right. We will track setVar more carefully
16to have faster update_data and expandKeys operations.
17
18This is a trade-off between speed and memory again but
19the speed is more critical here.
20"""
21
22# Copyright (C) 2003, 2004 Chris Larson
23# Copyright (C) 2005 Holger Hans Peter Freyther
24#
25# This program is free software; you can redistribute it and/or modify
26# it under the terms of the GNU General Public License version 2 as
27# published by the Free Software Foundation.
28#
29# This program is distributed in the hope that it will be useful,
30# but WITHOUT ANY WARRANTY; without even the implied warranty of
31# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
32# GNU General Public License for more details.
33#
34# You should have received a copy of the GNU General Public License along
35# with this program; if not, write to the Free Software Foundation, Inc.,
36# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
37#
38#Based on functions from the base bb module, Copyright 2003 Holger Schurig
39
40import sys, os, re
41if sys.argv[0][-5:] == "pydoc":
42 path = os.path.dirname(os.path.dirname(sys.argv[1]))
43else:
44 path = os.path.dirname(os.path.dirname(sys.argv[0]))
45sys.path.insert(0, path)
46from itertools import groupby
47
48from bb import data_smart
49from bb import codeparser
50import bb
51
52logger = data_smart.logger
53_dict_type = data_smart.DataSmart
54
55def init():
56 """Return a new object representing the Bitbake data"""
57 return _dict_type()
58
59def init_db(parent = None):
60 """Return a new object representing the Bitbake data,
61 optionally based on an existing object"""
62 if parent is not None:
63 return parent.createCopy()
64 else:
65 return _dict_type()
66
67def createCopy(source):
68    """Link the source datastore to the destination.
69    If a value is not found in the destination datastore, the lookup
70    falls through to the source datastore to get the value.
71    Values from the source are copy-on-write, i.e. any attempt to
72    modify one of them will end up putting the modified value
73    in the destination datastore.
74 """
75 return source.createCopy()
76
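
# Editor's note: an illustrative sketch (not part of the commit) of the
# copy-on-write behaviour described above; the variable name FOO is
# hypothetical. Reads fall through to the source until a key is written
# in the copy.
parent = init()
parent.setVar("FOO", "original")
child = createCopy(parent)
assert child.getVar("FOO", False) == "original"    # read falls through
child.setVar("FOO", "changed")                     # write shadows the key
assert parent.getVar("FOO", False) == "original"   # source is untouched
assert child.getVar("FOO", False) == "changed"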
77def initVar(var, d):
78 """Non-destructive var init for data structure"""
79 d.initVar(var)
80
81
82def setVar(var, value, d):
83 """Set a variable to a given value"""
84 d.setVar(var, value)
85
86
87def getVar(var, d, exp = 0):
88 """Gets the value of a variable"""
89 return d.getVar(var, exp)
90
91
92def renameVar(key, newkey, d):
93 """Renames a variable from key to newkey"""
94 d.renameVar(key, newkey)
95
96def delVar(var, d):
97 """Removes a variable from the data set"""
98 d.delVar(var)
99
100def appendVar(var, value, d):
101 """Append additional value to a variable"""
102 d.appendVar(var, value)
103
104def setVarFlag(var, flag, flagvalue, d):
105 """Set a flag for a given variable to a given value"""
106 d.setVarFlag(var, flag, flagvalue)
107
108def getVarFlag(var, flag, d):
109 """Gets given flag from given var"""
110 return d.getVarFlag(var, flag)
111
112def delVarFlag(var, flag, d):
113 """Removes a given flag from the variable's flags"""
114 d.delVarFlag(var, flag)
115
116def setVarFlags(var, flags, d):
117 """Set the flags for a given variable
118
119 Note:
120 setVarFlags will not clear previous
121 flags. Think of this method as
122 addVarFlags
123 """
124 d.setVarFlags(var, flags)
125
126def getVarFlags(var, d):
127 """Gets a variable's flags"""
128 return d.getVarFlags(var)
129
130def delVarFlags(var, d):
131 """Removes a variable's flags"""
132 d.delVarFlags(var)
133
134def keys(d):
135 """Return a list of keys in d"""
136 return d.keys()
137
138
139__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
140__expand_python_regexp__ = re.compile(r"\${@.+?}")
141
142def expand(s, d, varname = None):
143 """Variable expansion using the data store"""
144 return d.expand(s, varname)
145
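
# Editor's note: an illustrative sketch (not part of the commit) of how
# ${VAR} and ${@<python>} references are resolved on expansion; the
# variable names and values are hypothetical.
d = init()
d.setVar("A", "1")
d.setVar("B", "${A} and ${@int(d.getVar('A', True)) + 1}")
assert expand("${B}", d) == "1 and 2"
assert d.getVar("B", True) == "1 and 2"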
146def expandKeys(alterdata, readdata = None):
147 if readdata == None:
148 readdata = alterdata
149
150 todolist = {}
151 for key in alterdata:
152 if not '${' in key:
153 continue
154
155 ekey = expand(key, readdata)
156 if key == ekey:
157 continue
158 todolist[key] = ekey
159
160 # These two for loops are split for performance to maximise the
161 # usefulness of the expand cache
162
163 for key in todolist:
164 ekey = todolist[key]
165 newval = alterdata.getVar(ekey, 0)
166 if newval:
167 val = alterdata.getVar(key, 0)
168 if val is not None and newval is not None:
169 bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
170 alterdata.renameVar(key, ekey)
171
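
# Editor's note: an illustrative sketch (not part of the commit): keys whose
# *names* contain references are renamed to their expanded form by
# expandKeys(). The RDEPENDS_${PN} assignment below is hypothetical.
d = init()
d.setVar("PN", "busybox")
d.setVar("RDEPENDS_${PN}", "libc")
expandKeys(d)
assert d.getVar("RDEPENDS_busybox", False) == "libc"
assert d.getVar("RDEPENDS_${PN}", False) is None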
172def inheritFromOS(d, savedenv, permitted):
173 """Inherit variables from the initial environment."""
174 exportlist = bb.utils.preserved_envvars_exported()
175 for s in savedenv.keys():
176 if s in permitted:
177 try:
178 d.setVar(s, getVar(s, savedenv, True), op = 'from env')
179 if s in exportlist:
180 d.setVarFlag(s, "export", True, op = 'auto env export')
181 except TypeError:
182 pass
183
184def emit_var(var, o=sys.__stdout__, d = init(), all=False):
185 """Emit a variable to be sourced by a shell."""
186 if getVarFlag(var, "python", d):
187 return 0
188
189 export = getVarFlag(var, "export", d)
190 unexport = getVarFlag(var, "unexport", d)
191 func = getVarFlag(var, "func", d)
192 if not all and not export and not unexport and not func:
193 return 0
194
195 try:
196 if all:
197 oval = getVar(var, d, 0)
198 val = getVar(var, d, 1)
199 except (KeyboardInterrupt, bb.build.FuncFailed):
200 raise
201 except Exception as exc:
202 o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
203 return 0
204
205 if all:
206 d.varhistory.emit(var, oval, val, o)
207
208 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
209 return 0
210
211 varExpanded = expand(var, d)
212
213 if unexport:
214 o.write('unset %s\n' % varExpanded)
215 return 0
216
217 if val is None:
218 return 0
219
220 val = str(val)
221
222 if func:
223 # NOTE: should probably check for unbalanced {} within the var
224 o.write("%s() {\n%s\n}\n" % (varExpanded, val))
225 return 1
226
227 if export:
228 o.write('export ')
229
230    # if we're going to output this within double quotes,
231 # to a shell, we need to escape the quotes in the var
232 alter = re.sub('"', '\\"', val)
233 alter = re.sub('\n', ' \\\n', alter)
234 o.write('%s="%s"\n' % (varExpanded, alter))
235 return 0
236
237def emit_env(o=sys.__stdout__, d = init(), all=False):
238 """Emits all items in the data store in a format such that it can be sourced by a shell."""
239
240 isfunc = lambda key: bool(d.getVarFlag(key, "func"))
241 keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
242 grouped = groupby(keys, isfunc)
243 for isfunc, keys in grouped:
244 for key in keys:
245 emit_var(key, o, d, all and not isfunc) and o.write('\n')
246
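
# Editor's note: an illustrative sketch (not part of the commit) of emitting
# exported variables in shell-sourceable form; the variable CC and its
# value are hypothetical.
d = init()
d.setVar("CC", "gcc")
d.setVarFlag("CC", "export", "1")
emit_env(sys.stdout, d)        # writes: export CC="gcc"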
247def exported_keys(d):
248 return (key for key in d.keys() if not key.startswith('__') and
249 d.getVarFlag(key, 'export') and
250 not d.getVarFlag(key, 'unexport'))
251
252def exported_vars(d):
253 for key in exported_keys(d):
254 try:
255 value = d.getVar(key, True)
256 except Exception:
257            continue   # skip variables whose expansion fails
258
259 if value is not None:
260 yield key, str(value)
261
262def emit_func(func, o=sys.__stdout__, d = init()):
263 """Emits all items in the data store in a format such that it can be sourced by a shell."""
264
265 keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
266 for key in keys:
267 emit_var(key, o, d, False) and o.write('\n')
268
269 emit_var(func, o, d, False) and o.write('\n')
270 newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
271 newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
272 seen = set()
273 while newdeps:
274 deps = newdeps
275 seen |= deps
276 newdeps = set()
277 for dep in deps:
278 if d.getVarFlag(dep, "func") and not d.getVarFlag(dep, "python"):
279 emit_var(dep, o, d, False) and o.write('\n')
280 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
281 newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
282 newdeps -= seen
283
284def update_data(d):
285 """Performs final steps upon the datastore, including application of overrides"""
286 d.finalize(parent = True)
287
288def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
289 deps = set()
290 try:
291 if key[-1] == ']':
292 vf = key[:-1].split('[')
293 value = d.getVarFlag(vf[0], vf[1], False)
294 parser = d.expandWithRefs(value, key)
295 deps |= parser.references
296 deps = deps | (keys & parser.execs)
297 return deps, value
298 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs"]) or {}
299 vardeps = varflags.get("vardeps")
300 value = d.getVar(key, False)
301
302 def handle_contains(value, contains, d):
303 newvalue = ""
304 for k in sorted(contains):
305 l = (d.getVar(k, True) or "").split()
306 for word in sorted(contains[k]):
307 if word in l:
308 newvalue += "\n%s{%s} = Set" % (k, word)
309 else:
310 newvalue += "\n%s{%s} = Unset" % (k, word)
311 if not newvalue:
312 return value
313 if not value:
314 return newvalue
315 return value + newvalue
316
317 if "vardepvalue" in varflags:
318 value = varflags.get("vardepvalue")
319 elif varflags.get("func"):
320 if varflags.get("python"):
321 parsedvar = d.expandWithRefs(value, key)
322 parser = bb.codeparser.PythonParser(key, logger)
323 if parsedvar.value and "\t" in parsedvar.value:
324 logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
325 parser.parse_python(parsedvar.value)
326 deps = deps | parser.references
327 value = handle_contains(value, parser.contains, d)
328 else:
329 parsedvar = d.expandWithRefs(value, key)
330 parser = bb.codeparser.ShellParser(key, logger)
331 parser.parse_shell(parsedvar.value)
332 deps = deps | shelldeps
333 if vardeps is None:
334 parser.log.flush()
335 if "prefuncs" in varflags:
336 deps = deps | set(varflags["prefuncs"].split())
337 if "postfuncs" in varflags:
338 deps = deps | set(varflags["postfuncs"].split())
339 deps = deps | parsedvar.references
340 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
341 value = handle_contains(value, parsedvar.contains, d)
342 else:
343 parser = d.expandWithRefs(value, key)
344 deps |= parser.references
345 deps = deps | (keys & parser.execs)
346 value = handle_contains(value, parser.contains, d)
347
348 if "vardepvalueexclude" in varflags:
349 exclude = varflags.get("vardepvalueexclude")
350 for excl in exclude.split('|'):
351 if excl:
352 value = value.replace(excl, '')
353
354 # Add varflags, assuming an exclusion list is set
355 if varflagsexcl:
356 varfdeps = []
357 for f in varflags:
358 if f not in varflagsexcl:
359 varfdeps.append('%s[%s]' % (key, f))
360 if varfdeps:
361 deps |= set(varfdeps)
362
363 deps |= set((vardeps or "").split())
364 deps -= set(varflags.get("vardepsexclude", "").split())
365 except Exception as e:
366 raise bb.data_smart.ExpansionError(key, None, e)
367 return deps, value
368 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
369 #d.setVarFlag(key, "vardeps", deps)
370
371def generate_dependencies(d):
372
373 keys = set(key for key in d if not key.startswith("__"))
374 shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
375 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
376
377 deps = {}
378 values = {}
379
380 tasklist = d.getVar('__BBTASKS') or []
381 for task in tasklist:
382 deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
383 newdeps = deps[task]
384 seen = set()
385 while newdeps:
386 nextdeps = newdeps
387 seen |= nextdeps
388 newdeps = set()
389 for dep in nextdeps:
390 if dep not in deps:
391 deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
392 newdeps |= deps[dep]
393 newdeps -= seen
394 #print "For %s: %s" % (task, str(deps[task]))
395 return tasklist, deps, values
396
397def inherits_class(klass, d):
398 val = getVar('__inherit_cache', d) or []
399 needle = os.path.join('classes', '%s.bbclass' % klass)
400 for v in val:
401 if v.endswith(needle):
402 return True
403 return False
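
# Editor's note: an illustrative sketch (not part of the commit); the
# __inherit_cache contents and class names below are hypothetical. The
# cache is normally populated while classes are parsed.
d = init()
d.setVar("__inherit_cache", ["/path/to/meta/classes/image.bbclass"])
assert inherits_class("image", d) is True
assert inherits_class("native", d) is False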
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
new file mode 100644
index 0000000000..e4bdb2fdd9
--- /dev/null
+++ b/bitbake/lib/bb/data_smart.py
@@ -0,0 +1,804 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Smart Dictionary Implementation
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2004, 2005 Seb Frankengul
13# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
14# Copyright (C) 2005 Uli Luckas
15# Copyright (C) 2005 ROAD GmbH
16#
17# This program is free software; you can redistribute it and/or modify
18# it under the terms of the GNU General Public License version 2 as
19# published by the Free Software Foundation.
20#
21# This program is distributed in the hope that it will be useful,
22# but WITHOUT ANY WARRANTY; without even the implied warranty of
23# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24# GNU General Public License for more details.
25#
26# You should have received a copy of the GNU General Public License along
27# with this program; if not, write to the Free Software Foundation, Inc.,
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29# Based on functions from the base bb module, Copyright 2003 Holger Schurig
30
31import copy, re, sys, traceback
32from collections import MutableMapping
33import logging
34import hashlib
35import bb, bb.codeparser
36from bb import utils
37from bb.COW import COWDictBase
38
39logger = logging.getLogger("BitBake.Data")
40
41__setvar_keyword__ = ["_append", "_prepend", "_remove"]
42__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
43__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}")
44__expand_python_regexp__ = re.compile(r"\${@.+?}")
45
46def infer_caller_details(loginfo, parent = False, varval = True):
47 """Save the caller the trouble of specifying everything."""
48 # Save effort.
49 if 'ignore' in loginfo and loginfo['ignore']:
50 return
51 # If nothing was provided, mark this as possibly unneeded.
52 if not loginfo:
53 loginfo['ignore'] = True
54 return
55 # Infer caller's likely values for variable (var) and value (value),
56 # to reduce clutter in the rest of the code.
57 if varval and ('variable' not in loginfo or 'detail' not in loginfo):
58 try:
59 raise Exception
60 except Exception:
61 tb = sys.exc_info()[2]
62 if parent:
63 above = tb.tb_frame.f_back.f_back
64 else:
65 above = tb.tb_frame.f_back
66 lcls = above.f_locals.items()
67 for k, v in lcls:
68 if k == 'value' and 'detail' not in loginfo:
69 loginfo['detail'] = v
70 if k == 'var' and 'variable' not in loginfo:
71 loginfo['variable'] = v
72 # Infer file/line/function from traceback
73 if 'file' not in loginfo:
74 depth = 3
75 if parent:
76 depth = 4
77 file, line, func, text = traceback.extract_stack(limit = depth)[0]
78 loginfo['file'] = file
79 loginfo['line'] = line
80        if 'func' not in loginfo:
81 loginfo['func'] = func
82
83class VariableParse:
84 def __init__(self, varname, d, val = None):
85 self.varname = varname
86 self.d = d
87 self.value = val
88
89 self.references = set()
90 self.execs = set()
91 self.contains = {}
92
93 def var_sub(self, match):
94 key = match.group()[2:-1]
95 if self.varname and key:
96 if self.varname == key:
97 raise Exception("variable %s references itself!" % self.varname)
98 if key in self.d.expand_cache:
99 varparse = self.d.expand_cache[key]
100 var = varparse.value
101 else:
102 var = self.d.getVarFlag(key, "_content", True)
103 self.references.add(key)
104 if var is not None:
105 return var
106 else:
107 return match.group()
108
109 def python_sub(self, match):
110 code = match.group()[3:-1]
111 codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
112
113 parser = bb.codeparser.PythonParser(self.varname, logger)
114 parser.parse_python(code)
115 if self.varname:
116 vardeps = self.d.getVarFlag(self.varname, "vardeps", True)
117 if vardeps is None:
118 parser.log.flush()
119 else:
120 parser.log.flush()
121 self.references |= parser.references
122 self.execs |= parser.execs
123
124 for k in parser.contains:
125 if k not in self.contains:
126 self.contains[k] = parser.contains[k].copy()
127 else:
128 self.contains[k].update(parser.contains[k])
129 value = utils.better_eval(codeobj, DataContext(self.d))
130 return str(value)
131
132
133class DataContext(dict):
134 def __init__(self, metadata, **kwargs):
135 self.metadata = metadata
136 dict.__init__(self, **kwargs)
137 self['d'] = metadata
138
139 def __missing__(self, key):
140 value = self.metadata.getVar(key, True)
141 if value is None or self.metadata.getVarFlag(key, 'func'):
142 raise KeyError(key)
143 else:
144 return value
145
146class ExpansionError(Exception):
147 def __init__(self, varname, expression, exception):
148 self.expression = expression
149 self.variablename = varname
150 self.exception = exception
151 if varname:
152 if expression:
153 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
154 else:
155 self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception)
156 else:
157 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
158 Exception.__init__(self, self.msg)
159 self.args = (varname, expression, exception)
160 def __str__(self):
161 return self.msg
162
163class IncludeHistory(object):
164 def __init__(self, parent = None, filename = '[TOP LEVEL]'):
165 self.parent = parent
166 self.filename = filename
167 self.children = []
168 self.current = self
169
170 def copy(self):
171 new = IncludeHistory(self.parent, self.filename)
172 for c in self.children:
173 new.children.append(c)
174 return new
175
176 def include(self, filename):
177 newfile = IncludeHistory(self.current, filename)
178 self.current.children.append(newfile)
179 self.current = newfile
180 return self
181
182 def __enter__(self):
183 pass
184
185 def __exit__(self, a, b, c):
186 if self.current.parent:
187 self.current = self.current.parent
188 else:
189            bb.warn("Include log: Tried to finish '%s' at top level." % self.filename)
190 return False
191
192 def emit(self, o, level = 0):
193 """Emit an include history file, and its children."""
194 if level:
195 spaces = " " * (level - 1)
196 o.write("# %s%s" % (spaces, self.filename))
197 if len(self.children) > 0:
198 o.write(" includes:")
199 else:
200 o.write("#\n# INCLUDE HISTORY:\n#")
201 level = level + 1
202 for child in self.children:
203 o.write("\n")
204 child.emit(o, level)
205
206class VariableHistory(object):
207 def __init__(self, dataroot):
208 self.dataroot = dataroot
209 self.variables = COWDictBase.copy()
210
211 def copy(self):
212 new = VariableHistory(self.dataroot)
213 new.variables = self.variables.copy()
214 return new
215
216 def record(self, *kwonly, **loginfo):
217 if not self.dataroot._tracking:
218 return
219 if len(kwonly) > 0:
220 raise TypeError
221 infer_caller_details(loginfo, parent = True)
222 if 'ignore' in loginfo and loginfo['ignore']:
223 return
224 if 'op' not in loginfo or not loginfo['op']:
225 loginfo['op'] = 'set'
226 if 'detail' in loginfo:
227 loginfo['detail'] = str(loginfo['detail'])
228 if 'variable' not in loginfo or 'file' not in loginfo:
229 raise ValueError("record() missing variable or file.")
230 var = loginfo['variable']
231
232 if var not in self.variables:
233 self.variables[var] = []
234 self.variables[var].append(loginfo.copy())
235
236 def variable(self, var):
237 if var in self.variables:
238 return self.variables[var]
239 else:
240 return []
241
242 def emit(self, var, oval, val, o):
243 history = self.variable(var)
244 commentVal = re.sub('\n', '\n#', str(oval))
245 if history:
246 if len(history) == 1:
247 o.write("#\n# $%s\n" % var)
248 else:
249 o.write("#\n# $%s [%d operations]\n" % (var, len(history)))
250 for event in history:
251 # o.write("# %s\n" % str(event))
252 if 'func' in event:
253 # If we have a function listed, this is internal
254 # code, not an operation in a config file, and the
255 # full path is distracting.
256 event['file'] = re.sub('.*/', '', event['file'])
257 display_func = ' [%s]' % event['func']
258 else:
259 display_func = ''
260 if 'flag' in event:
261 flag = '[%s] ' % (event['flag'])
262 else:
263 flag = ''
264 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
265 if len(history) > 1:
266 o.write("# computed:\n")
267 o.write('# "%s"\n' % (commentVal))
268 else:
269 o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
270 o.write('# "%s"\n' % (commentVal))
271
272 def get_variable_files(self, var):
273 """Get the files where operations are made on a variable"""
274 var_history = self.variable(var)
275 files = []
276 for event in var_history:
277 files.append(event['file'])
278 return files
279
280 def get_variable_lines(self, var, f):
281        """Get the lines where operations are made on a variable in file f"""
282 var_history = self.variable(var)
283 lines = []
284 for event in var_history:
285            if f == event['file']:
286 line = event['line']
287 lines.append(line)
288 return lines
289
290 def del_var_history(self, var, f=None, line=None):
291 """If file f and line are not given, the entire history of var is deleted"""
292 if var in self.variables:
293 if f and line:
294                self.variables[var] = [x for x in self.variables[var] if not (x['file'] == f and x['line'] == line)]
295 else:
296 self.variables[var] = []
297
298class DataSmart(MutableMapping):
299 def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
300 self.dict = {}
301
302 self.inchistory = IncludeHistory()
303 self.varhistory = VariableHistory(self)
304 self._tracking = False
305
306 # cookie monster tribute
307 self._special_values = special
308 self._seen_overrides = seen
309
310 self.expand_cache = {}
311
312 def enableTracking(self):
313 self._tracking = True
314
315 def disableTracking(self):
316 self._tracking = False
317
318 def expandWithRefs(self, s, varname):
319
320 if not isinstance(s, basestring): # sanity check
321 return VariableParse(varname, self, s)
322
323 if varname and varname in self.expand_cache:
324 return self.expand_cache[varname]
325
326 varparse = VariableParse(varname, self)
327
328 while s.find('${') != -1:
329 olds = s
330 try:
331 s = __expand_var_regexp__.sub(varparse.var_sub, s)
332 s = __expand_python_regexp__.sub(varparse.python_sub, s)
333 if s == olds:
334 break
335 except ExpansionError:
336 raise
337 except bb.parse.SkipPackage:
338 raise
339 except Exception as exc:
340 raise ExpansionError(varname, s, exc)
341
342 varparse.value = s
343
344 if varname:
345 self.expand_cache[varname] = varparse
346
347 return varparse
348
349 def expand(self, s, varname = None):
350 return self.expandWithRefs(s, varname).value
351
352
353 def finalize(self, parent = False):
354 """Performs final steps upon the datastore, including application of overrides"""
355
356 overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
357 finalize_caller = {
358 'op': 'finalize',
359 }
360 infer_caller_details(finalize_caller, parent = parent, varval = False)
361
362 #
363        # Let us see what breaks here. We used to iterate
364        # over each variable, apply the override and then
365        # do the line expansion.
366        # If we are unlucky - which we will be - the keys
367        # were in some order that mattered for this
368        # method and which we no longer have.
369        # Anyway, we will fix that and write test cases this
370        # time.
371
372 #
373 # First we apply all overrides
374 # Then we will handle _append and _prepend and store the _remove
375 # information for later.
376 #
377
378 # We only want to report finalization once per variable overridden.
379 finalizes_reported = {}
380
381 for o in overrides:
382 # calculate '_'+override
383 l = len(o) + 1
384
385 # see if one should even try
386 if o not in self._seen_overrides:
387 continue
388
389 vars = self._seen_overrides[o].copy()
390 for var in vars:
391 name = var[:-l]
392 try:
393 # Report only once, even if multiple changes.
394 if name not in finalizes_reported:
395 finalizes_reported[name] = True
396 finalize_caller['variable'] = name
397 finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
398 self.varhistory.record(**finalize_caller)
399 # Copy history of the override over.
400 for event in self.varhistory.variable(var):
401 loginfo = event.copy()
402 loginfo['variable'] = name
403 loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
404 self.varhistory.record(**loginfo)
405 self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
406 self.delVar(var)
407 except Exception:
408 logger.info("Untracked delVar")
409
410 # now on to the appends and prepends, and stashing the removes
411 for op in __setvar_keyword__:
412 if op in self._special_values:
413 appends = self._special_values[op] or []
414 for append in appends:
415 keep = []
416 for (a, o) in self.getVarFlag(append, op) or []:
417 match = True
418 if o:
419 for o2 in o.split("_"):
420 if not o2 in overrides:
421 match = False
422 if not match:
423 keep.append((a ,o))
424 continue
425
426 if op == "_append":
427 sval = self.getVar(append, False) or ""
428 sval += a
429 self.setVar(append, sval)
430 elif op == "_prepend":
431 sval = a + (self.getVar(append, False) or "")
432 self.setVar(append, sval)
433 elif op == "_remove":
434 removes = self.getVarFlag(append, "_removeactive", False) or []
435 removes.extend(a.split())
436 self.setVarFlag(append, "_removeactive", removes, ignore=True)
437
438 # We save overrides that may be applied at some later stage
439 if keep:
440 self.setVarFlag(append, op, keep, ignore=True)
441 else:
442 self.delVarFlag(append, op, ignore=True)
443
444 def initVar(self, var):
445 self.expand_cache = {}
446 if not var in self.dict:
447 self.dict[var] = {}
448
449 def _findVar(self, var):
450 dest = self.dict
451 while dest:
452 if var in dest:
453 return dest[var]
454
455 if "_data" not in dest:
456 break
457 dest = dest["_data"]
458
459 def _makeShadowCopy(self, var):
460 if var in self.dict:
461 return
462
463 local_var = self._findVar(var)
464
465 if local_var:
466 self.dict[var] = copy.copy(local_var)
467 else:
468 self.initVar(var)
469
470
471 def setVar(self, var, value, **loginfo):
472 #print("var=" + str(var) + " val=" + str(value))
473 if 'op' not in loginfo:
474 loginfo['op'] = "set"
475 self.expand_cache = {}
476 match = __setvar_regexp__.match(var)
477 if match and match.group("keyword") in __setvar_keyword__:
478 base = match.group('base')
479 keyword = match.group("keyword")
480 override = match.group('add')
481 l = self.getVarFlag(base, keyword) or []
482 l.append([value, override])
483 self.setVarFlag(base, keyword, l, ignore=True)
484 # And cause that to be recorded:
485 loginfo['detail'] = value
486 loginfo['variable'] = base
487 if override:
488 loginfo['op'] = '%s[%s]' % (keyword, override)
489 else:
490 loginfo['op'] = keyword
491 self.varhistory.record(**loginfo)
492 # todo make sure keyword is not __doc__ or __module__
493 # pay the cookie monster
494 try:
495 self._special_values[keyword].add(base)
496 except KeyError:
497 self._special_values[keyword] = set()
498 self._special_values[keyword].add(base)
499
500 return
501
502 if not var in self.dict:
503 self._makeShadowCopy(var)
504
505 # more cookies for the cookie monster
506 if '_' in var:
507 self._setvar_update_overrides(var)
508
509 # setting var
510 self.dict[var]["_content"] = value
511 self.varhistory.record(**loginfo)
512
513 def _setvar_update_overrides(self, var):
514 # aka pay the cookie monster
515 override = var[var.rfind('_')+1:]
516 if len(override) > 0:
517 if override not in self._seen_overrides:
518 self._seen_overrides[override] = set()
519 self._seen_overrides[override].add( var )
520
521 def getVar(self, var, expand=False, noweakdefault=False):
522 return self.getVarFlag(var, "_content", expand, noweakdefault)
523
524 def renameVar(self, key, newkey, **loginfo):
525 """
526 Rename the variable key to newkey
527 """
528 val = self.getVar(key, 0)
529 if val is not None:
530 loginfo['variable'] = newkey
531 loginfo['op'] = 'rename from %s' % key
532 loginfo['detail'] = val
533 self.varhistory.record(**loginfo)
534 self.setVar(newkey, val, ignore=True)
535
536 for i in (__setvar_keyword__):
537 src = self.getVarFlag(key, i)
538 if src is None:
539 continue
540
541 dest = self.getVarFlag(newkey, i) or []
542 dest.extend(src)
543 self.setVarFlag(newkey, i, dest, ignore=True)
544
545 if i in self._special_values and key in self._special_values[i]:
546 self._special_values[i].remove(key)
547 self._special_values[i].add(newkey)
548
549 loginfo['variable'] = key
550 loginfo['op'] = 'rename (to)'
551 loginfo['detail'] = newkey
552 self.varhistory.record(**loginfo)
553 self.delVar(key, ignore=True)
554
555 def appendVar(self, var, value, **loginfo):
556 loginfo['op'] = 'append'
557 self.varhistory.record(**loginfo)
558 newvalue = (self.getVar(var, False) or "") + value
559 self.setVar(var, newvalue, ignore=True)
560
561 def prependVar(self, var, value, **loginfo):
562 loginfo['op'] = 'prepend'
563 self.varhistory.record(**loginfo)
564 newvalue = value + (self.getVar(var, False) or "")
565 self.setVar(var, newvalue, ignore=True)
566
567 def delVar(self, var, **loginfo):
568 loginfo['detail'] = ""
569 loginfo['op'] = 'del'
570 self.varhistory.record(**loginfo)
571 self.expand_cache = {}
572 self.dict[var] = {}
573 if '_' in var:
574 override = var[var.rfind('_')+1:]
575 if override and override in self._seen_overrides and var in self._seen_overrides[override]:
576 self._seen_overrides[override].remove(var)
577
578 def setVarFlag(self, var, flag, value, **loginfo):
579 if 'op' not in loginfo:
580 loginfo['op'] = "set"
581 loginfo['flag'] = flag
582 self.varhistory.record(**loginfo)
583 if not var in self.dict:
584 self._makeShadowCopy(var)
585 self.dict[var][flag] = value
586
587 if flag == "defaultval" and '_' in var:
588 self._setvar_update_overrides(var)
589
590 if flag == "unexport" or flag == "export":
591 if not "__exportlist" in self.dict:
592 self._makeShadowCopy("__exportlist")
593 if not "_content" in self.dict["__exportlist"]:
594 self.dict["__exportlist"]["_content"] = set()
595 self.dict["__exportlist"]["_content"].add(var)
596
597 def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
598 local_var = self._findVar(var)
599 value = None
600 if local_var is not None:
601 if flag in local_var:
602 value = copy.copy(local_var[flag])
603 elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
604 value = copy.copy(local_var["defaultval"])
605 if expand and value:
606 # Only getvar (flag == _content) hits the expand cache
607 cachename = None
608 if flag == "_content":
609 cachename = var
610 else:
611 cachename = var + "[" + flag + "]"
612 value = self.expand(value, cachename)
613 if value is not None and flag == "_content" and local_var is not None and "_removeactive" in local_var:
614 filtered = filter(lambda v: v not in local_var["_removeactive"],
615 value.split(" "))
616 value = " ".join(filtered)
617 if expand:
618 # We need to ensure the expand cache has the correct value
619 # flag == "_content" here
620 self.expand_cache[var].value = value
621 return value
622
623 def delVarFlag(self, var, flag, **loginfo):
624 local_var = self._findVar(var)
625 if not local_var:
626 return
627 if not var in self.dict:
628 self._makeShadowCopy(var)
629
630 if var in self.dict and flag in self.dict[var]:
631 loginfo['detail'] = ""
632 loginfo['op'] = 'delFlag'
633 loginfo['flag'] = flag
634 self.varhistory.record(**loginfo)
635
636 del self.dict[var][flag]
637
638 def appendVarFlag(self, var, flag, value, **loginfo):
639 loginfo['op'] = 'append'
640 loginfo['flag'] = flag
641 self.varhistory.record(**loginfo)
642 newvalue = (self.getVarFlag(var, flag, False) or "") + value
643 self.setVarFlag(var, flag, newvalue, ignore=True)
644
645 def prependVarFlag(self, var, flag, value, **loginfo):
646 loginfo['op'] = 'prepend'
647 loginfo['flag'] = flag
648 self.varhistory.record(**loginfo)
649 newvalue = value + (self.getVarFlag(var, flag, False) or "")
650 self.setVarFlag(var, flag, newvalue, ignore=True)
651
652 def setVarFlags(self, var, flags, **loginfo):
653 infer_caller_details(loginfo)
654 if not var in self.dict:
655 self._makeShadowCopy(var)
656
657 for i in flags:
658 if i == "_content":
659 continue
660 loginfo['flag'] = i
661 loginfo['detail'] = flags[i]
662 self.varhistory.record(**loginfo)
663 self.dict[var][i] = flags[i]
664
665 def getVarFlags(self, var, expand = False, internalflags=False):
666 local_var = self._findVar(var)
667 flags = {}
668
669 if local_var:
670 for i in local_var:
671 if i.startswith("_") and not internalflags:
672 continue
673 flags[i] = local_var[i]
674 if expand and i in expand:
675 flags[i] = self.expand(flags[i], var + "[" + i + "]")
676 if len(flags) == 0:
677 return None
678 return flags
679
680
681 def delVarFlags(self, var, **loginfo):
682 if not var in self.dict:
683 self._makeShadowCopy(var)
684
685 if var in self.dict:
686 content = None
687
688 loginfo['op'] = 'delete flags'
689 self.varhistory.record(**loginfo)
690
691 # try to save the content
692 if "_content" in self.dict[var]:
693 content = self.dict[var]["_content"]
694 self.dict[var] = {}
695 self.dict[var]["_content"] = content
696 else:
697 del self.dict[var]
698
699
700 def createCopy(self):
701 """
702 Create a copy of self by setting _data to self
703 """
704 # we really want this to be a DataSmart...
705 data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
706 data.dict["_data"] = self.dict
707 data.varhistory = self.varhistory.copy()
708 data.varhistory.datasmart = data
709 data.inchistory = self.inchistory.copy()
710
711 data._tracking = self._tracking
712
713 return data
714
715 def expandVarref(self, variable, parents=False):
716 """Find all references to variable in the data and expand it
717 in place, optionally descending to parent datastores."""
718
719 if parents:
720 keys = iter(self)
721 else:
722 keys = self.localkeys()
723
724 ref = '${%s}' % variable
725 value = self.getVar(variable, False)
726 for key in keys:
727 referrervalue = self.getVar(key, False)
728 if referrervalue and ref in referrervalue:
729 self.setVar(key, referrervalue.replace(ref, value))
730
731 def localkeys(self):
732 for key in self.dict:
733 if key != '_data':
734 yield key
735
736 def __iter__(self):
737 def keylist(d):
738 klist = set()
739 for key in d:
740 if key == "_data":
741 continue
742 if not d[key]:
743 continue
744 klist.add(key)
745
746 if "_data" in d:
747 klist |= keylist(d["_data"])
748
749 return klist
750
751 for k in keylist(self.dict):
752 yield k
753
754 def __len__(self):
755 return len(frozenset(self))
756
757 def __getitem__(self, item):
758 value = self.getVar(item, False)
759 if value is None:
760 raise KeyError(item)
761 else:
762 return value
763
764 def __setitem__(self, var, value):
765 self.setVar(var, value)
766
767 def __delitem__(self, var):
768 self.delVar(var)
769
770 def get_hash(self):
771 data = {}
772 d = self.createCopy()
773 bb.data.expandKeys(d)
774 bb.data.update_data(d)
775
776 config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
777 keys = set(key for key in iter(d) if not key.startswith("__"))
778 for key in keys:
779 if key in config_whitelist:
780 continue
781
782 value = d.getVar(key, False) or ""
783 data.update({key:value})
784
785 varflags = d.getVarFlags(key, internalflags = True)
786 if not varflags:
787 continue
788 for f in varflags:
789 if f == "_content":
790 continue
791 data.update({'%s[%s]' % (key, f):varflags[f]})
792
793 for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]:
794 bb_list = d.getVar(key, False) or []
795 bb_list.sort()
796 data.update({key:str(bb_list)})
797
798 if key == "__BBANONFUNCS":
799 for i in bb_list:
800 value = d.getVar(i, True) or ""
801 data.update({i:value})
802
803 data_str = str([(k, data[k]) for k in sorted(data.keys())])
804 return hashlib.md5(data_str).hexdigest()
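
# Editor's note: an illustrative sketch (not part of the commit) of the
# override and _append handling performed by finalize(); the variable
# names and the "arm" override are hypothetical.
d = DataSmart()
d.setVar("OVERRIDES", "arm:local")
d.setVar("CFLAGS", "-O2")
d.setVar("CFLAGS_arm", "-O2 -mthumb")      # value selected when 'arm' is active
d.setVar("DEPENDS", "gcc")
d.setVar("DEPENDS_append", " make")
d.finalize()
assert d.getVar("CFLAGS", True) == "-O2 -mthumb"
assert d.getVar("DEPENDS", True) == "gcc make"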
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
new file mode 100644
index 0000000000..e2050431ec
--- /dev/null
+++ b/bitbake/lib/bb/event.py
@@ -0,0 +1,641 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Event' implementation
5
6Classes and functions for manipulating 'events' in the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import os, sys
26import warnings
27try:
28 import cPickle as pickle
29except ImportError:
30 import pickle
31import logging
32import atexit
33import traceback
34import bb.utils
35import bb.compat
36import bb.exceptions
37
38# This is the pid for which we should generate the event. This is set when
39# the runqueue forks off.
40worker_pid = 0
41worker_fire = None
42
43logger = logging.getLogger('BitBake.Event')
44
45class Event(object):
46 """Base class for events"""
47
48 def __init__(self):
49 self.pid = worker_pid
50
51Registered = 10
52AlreadyRegistered = 14
53
54def get_class_handlers():
55 return _handlers
56
57def set_class_handlers(h):
58    global _handlers
    _handlers = h
59
60def clean_class_handlers():
61 return bb.compat.OrderedDict()
62
63# Internal
64_handlers = clean_class_handlers()
65_ui_handlers = {}
66_ui_logfilters = {}
67_ui_handler_seq = 0
68_event_handler_map = {}
69_catchall_handlers = {}
70
71def execute_handler(name, handler, event, d):
72 event.data = d
73 try:
74 ret = handler(event)
75 except bb.parse.SkipPackage:
76 raise
77 except Exception:
78 etype, value, tb = sys.exc_info()
79 logger.error("Execution of event handler '%s' failed" % name,
80 exc_info=(etype, value, tb.tb_next))
81 raise
82 except SystemExit as exc:
83 if exc.code != 0:
84 logger.error("Execution of event handler '%s' failed" % name)
85 raise
86 finally:
87 del event.data
88
89def fire_class_handlers(event, d):
90 if isinstance(event, logging.LogRecord):
91 return
92
93 eid = str(event.__class__)[8:-2]
94 evt_hmap = _event_handler_map.get(eid, {})
95 for name, handler in _handlers.iteritems():
96 if name in _catchall_handlers or name in evt_hmap:
97 try:
98 execute_handler(name, handler, event, d)
99 except Exception:
100 continue
101
102ui_queue = []
103@atexit.register
104def print_ui_queue():
105 """If we're exiting before a UI has been spawned, display any queued
106 LogRecords to the console."""
107 logger = logging.getLogger("BitBake")
108 if not _ui_handlers:
109 from bb.msg import BBLogFormatter
110 console = logging.StreamHandler(sys.stdout)
111 console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
112 logger.handlers = [console]
113
114 # First check to see if we have any proper messages
115 msgprint = False
116 for event in ui_queue:
117 if isinstance(event, logging.LogRecord):
118 if event.levelno > logging.DEBUG:
119 logger.handle(event)
120 msgprint = True
121 if msgprint:
122 return
123
124 # Nope, so just print all of the messages we have (including debug messages)
125 for event in ui_queue:
126 if isinstance(event, logging.LogRecord):
127 logger.handle(event)
128
129def fire_ui_handlers(event, d):
130 if not _ui_handlers:
131 # No UI handlers registered yet, queue up the messages
132 ui_queue.append(event)
133 return
134
135 errors = []
136 for h in _ui_handlers:
137 #print "Sending event %s" % event
138 try:
139 if not _ui_logfilters[h].filter(event):
140 continue
141 # We use pickle here since it better handles object instances
142 # which xmlrpc's marshaller does not. Events *must* be serializable
143 # by pickle.
144 if hasattr(_ui_handlers[h].event, "sendpickle"):
145 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
146 else:
147 _ui_handlers[h].event.send(event)
148 except:
149 errors.append(h)
150 for h in errors:
151 del _ui_handlers[h]
152
153def fire(event, d):
154 """Fire off an Event"""
155
156 # We can fire class handlers in the worker process context and this is
157 # desired so they get the task based datastore.
158 # UI handlers need to be fired in the server context so we defer this. They
159 # don't have a datastore so the datastore context isn't a problem.
160
161 fire_class_handlers(event, d)
162 if worker_fire:
163 worker_fire(event, d)
164 else:
165 fire_ui_handlers(event, d)
166
167def fire_from_worker(event, d):
168 fire_ui_handlers(event, d)
169
170noop = lambda _: None
171def register(name, handler, mask=[]):
172 """Register an Event handler"""
173
174 # already registered
175 if name in _handlers:
176 return AlreadyRegistered
177
178 if handler is not None:
179 # handle string containing python code
180 if isinstance(handler, basestring):
181 tmp = "def %s(e):\n%s" % (name, handler)
182 try:
183 code = compile(tmp, "%s(e)" % name, "exec")
184 except SyntaxError:
185 logger.error("Unable to register event handler '%s':\n%s", name,
186 ''.join(traceback.format_exc(limit=0)))
187 _handlers[name] = noop
188 return
189 env = {}
190 bb.utils.better_exec(code, env)
191 func = bb.utils.better_eval(name, env)
192 _handlers[name] = func
193 else:
194 _handlers[name] = handler
195
196 if not mask or '*' in mask:
197 _catchall_handlers[name] = True
198 else:
199 for m in mask:
200 if _event_handler_map.get(m, None) is None:
201 _event_handler_map[m] = {}
202 _event_handler_map[m][name] = True
203
204 return Registered
205
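
# Editor's note: an illustrative sketch (not part of the commit) showing a
# class handler registered for a single event type and then fired; the
# handler name and the throwaway datastore are hypothetical.
import bb.data
import bb.event

def on_config_parsed(e):
    print("ConfigParsed seen, pid %s" % e.pid)

bb.event.register("on_config_parsed", on_config_parsed,
                  mask=["bb.event.ConfigParsed"])
bb.event.fire(bb.event.ConfigParsed(), bb.data.init())   # handler runs in this process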
206def remove(name, handler):
207 """Remove an Event handler"""
208 _handlers.pop(name)
209
210def register_UIHhandler(handler):
211 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
212 _ui_handlers[_ui_handler_seq] = handler
213 level, debug_domains = bb.msg.constructLogOptions()
214 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
215 return _ui_handler_seq
216
217def unregister_UIHhandler(handlerNum):
218 if handlerNum in _ui_handlers:
219 del _ui_handlers[handlerNum]
220 return
221
222# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC
223class UIEventFilter(object):
224 def __init__(self, level, debug_domains):
225 self.update(None, level, debug_domains)
226
227 def update(self, eventmask, level, debug_domains):
228 self.eventmask = eventmask
229 self.stdlevel = level
230 self.debug_domains = debug_domains
231
232 def filter(self, event):
233 if isinstance(event, logging.LogRecord):
234 if event.levelno >= self.stdlevel:
235 return True
236 if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]:
237 return True
238 return False
239 eid = str(event.__class__)[8:-2]
240 if self.eventmask and eid not in self.eventmask:
241 return False
242 return True
243
244def set_UIHmask(handlerNum, level, debug_domains, mask):
245 if not handlerNum in _ui_handlers:
246 return False
247 if '*' in mask:
248 _ui_logfilters[handlerNum].update(None, level, debug_domains)
249 else:
250 _ui_logfilters[handlerNum].update(mask, level, debug_domains)
251 return True
252
253def getName(e):
254 """Returns the name of a class or class instance"""
255 if getattr(e, "__name__", None) == None:
256 return e.__class__.__name__
257 else:
258 return e.__name__
259
260class OperationStarted(Event):
261 """An operation has begun"""
262 def __init__(self, msg = "Operation Started"):
263 Event.__init__(self)
264 self.msg = msg
265
266class OperationCompleted(Event):
267 """An operation has completed"""
268 def __init__(self, total, msg = "Operation Completed"):
269 Event.__init__(self)
270 self.total = total
271 self.msg = msg
272
273class OperationProgress(Event):
274 """An operation is in progress"""
275 def __init__(self, current, total, msg = "Operation in Progress"):
276 Event.__init__(self)
277 self.current = current
278 self.total = total
279        self.msg = msg + ": %s/%s" % (current, total)
280
281class ConfigParsed(Event):
282 """Configuration Parsing Complete"""
283
284class RecipeEvent(Event):
285 def __init__(self, fn):
286 self.fn = fn
287 Event.__init__(self)
288
289class RecipePreFinalise(RecipeEvent):
290    """ Recipe Parsing Complete but not yet finalised"""
291
292class RecipeParsed(RecipeEvent):
293 """ Recipe Parsing Complete """
294
295class StampUpdate(Event):
296 """Trigger for any adjustment of the stamp files to happen"""
297
298 def __init__(self, targets, stampfns):
299 self._targets = targets
300 self._stampfns = stampfns
301 Event.__init__(self)
302
303 def getStampPrefix(self):
304 return self._stampfns
305
306 def getTargets(self):
307 return self._targets
308
309 stampPrefix = property(getStampPrefix)
310 targets = property(getTargets)
311
312class BuildBase(Event):
313 """Base class for bbmake run events"""
314
315 def __init__(self, n, p, failures = 0):
316 self._name = n
317 self._pkgs = p
318 Event.__init__(self)
319 self._failures = failures
320
321 def getPkgs(self):
322 return self._pkgs
323
324 def setPkgs(self, pkgs):
325 self._pkgs = pkgs
326
327 def getName(self):
328 return self._name
329
330 def setName(self, name):
331 self._name = name
332
333 def getCfg(self):
334 return self.data
335
336 def setCfg(self, cfg):
337 self.data = cfg
338
339 def getFailures(self):
340 """
341 Return the number of failed packages
342 """
343 return self._failures
344
345 pkgs = property(getPkgs, setPkgs, None, "pkgs property")
346 name = property(getName, setName, None, "name property")
347 cfg = property(getCfg, setCfg, None, "cfg property")
348
349
350
351
352
353class BuildStarted(BuildBase, OperationStarted):
354 """bbmake build run started"""
355 def __init__(self, n, p, failures = 0):
356 OperationStarted.__init__(self, "Building Started")
357 BuildBase.__init__(self, n, p, failures)
358
359class BuildCompleted(BuildBase, OperationCompleted):
360 """bbmake build run completed"""
361 def __init__(self, total, n, p, failures = 0):
362 if not failures:
363 OperationCompleted.__init__(self, total, "Building Succeeded")
364 else:
365 OperationCompleted.__init__(self, total, "Building Failed")
366 BuildBase.__init__(self, n, p, failures)
367
368class DiskFull(Event):
369 """Disk full case build aborted"""
370 def __init__(self, dev, type, freespace, mountpoint):
371 Event.__init__(self)
372 self._dev = dev
373 self._type = type
374 self._free = freespace
375 self._mountpoint = mountpoint
376
377class NoProvider(Event):
378 """No Provider for an Event"""
379
380 def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]):
381 Event.__init__(self)
382 self._item = item
383 self._runtime = runtime
384 self._dependees = dependees
385 self._reasons = reasons
386 self._close_matches = close_matches
387
388 def getItem(self):
389 return self._item
390
391 def isRuntime(self):
392 return self._runtime
393
394class MultipleProviders(Event):
395 """Multiple Providers"""
396
397 def __init__(self, item, candidates, runtime = False):
398 Event.__init__(self)
399 self._item = item
400 self._candidates = candidates
401 self._is_runtime = runtime
402
403 def isRuntime(self):
404 """
405 Is this a runtime issue?
406 """
407 return self._is_runtime
408
409 def getItem(self):
410 """
411 The name for the to be build item
412 """
413 return self._item
414
415 def getCandidates(self):
416 """
417 Get the possible Candidates for a PROVIDER.
418 """
419 return self._candidates
420
421class ParseStarted(OperationStarted):
422 """Recipe parsing for the runqueue has begun"""
423 def __init__(self, total):
424 OperationStarted.__init__(self, "Recipe parsing Started")
425 self.total = total
426
427class ParseCompleted(OperationCompleted):
428 """Recipe parsing for the runqueue has completed"""
429 def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
430 OperationCompleted.__init__(self, total, "Recipe parsing Completed")
431 self.cached = cached
432 self.parsed = parsed
433 self.skipped = skipped
434 self.virtuals = virtuals
435 self.masked = masked
436 self.errors = errors
437 self.sofar = cached + parsed
438
439class ParseProgress(OperationProgress):
440 """Recipe parsing progress"""
441 def __init__(self, current, total):
442 OperationProgress.__init__(self, current, total, "Recipe parsing")
443
444
445class CacheLoadStarted(OperationStarted):
446 """Loading of the dependency cache has begun"""
447 def __init__(self, total):
448 OperationStarted.__init__(self, "Loading cache Started")
449 self.total = total
450
451class CacheLoadProgress(OperationProgress):
452 """Cache loading progress"""
453 def __init__(self, current, total):
454 OperationProgress.__init__(self, current, total, "Loading cache")
455
456class CacheLoadCompleted(OperationCompleted):
457 """Cache loading is complete"""
458 def __init__(self, total, num_entries):
459 OperationCompleted.__init__(self, total, "Loading cache Completed")
460 self.num_entries = num_entries
461
462class TreeDataPreparationStarted(OperationStarted):
463 """Tree data preparation started"""
464 def __init__(self):
465 OperationStarted.__init__(self, "Preparing tree data Started")
466
467class TreeDataPreparationProgress(OperationProgress):
468 """Tree data preparation is in progress"""
469 def __init__(self, current, total):
470 OperationProgress.__init__(self, current, total, "Preparing tree data")
471
472class TreeDataPreparationCompleted(OperationCompleted):
473 """Tree data preparation completed"""
474 def __init__(self, total):
475 OperationCompleted.__init__(self, total, "Preparing tree data Completed")
476
477class DepTreeGenerated(Event):
478 """
479 Event when a dependency tree has been generated
480 """
481
482 def __init__(self, depgraph):
483 Event.__init__(self)
484 self._depgraph = depgraph
485
486class TargetsTreeGenerated(Event):
487 """
488 Event when a set of buildable targets has been generated
489 """
490 def __init__(self, model):
491 Event.__init__(self)
492 self._model = model
493
494class FilesMatchingFound(Event):
495 """
496 Event when a list of files matching the supplied pattern has
497 been generated
498 """
499 def __init__(self, pattern, matches):
500 Event.__init__(self)
501 self._pattern = pattern
502 self._matches = matches
503
504class CoreBaseFilesFound(Event):
505 """
506 Event when a list of appropriate config files has been generated
507 """
508 def __init__(self, paths):
509 Event.__init__(self)
510 self._paths = paths
511
512class ConfigFilesFound(Event):
513 """
514 Event when a list of appropriate config files has been generated
515 """
516 def __init__(self, variable, values):
517 Event.__init__(self)
518 self._variable = variable
519 self._values = values
520
521class ConfigFilePathFound(Event):
522 """
523 Event when a path for a config file has been found
524 """
525 def __init__(self, path):
526 Event.__init__(self)
527 self._path = path
528
529class MsgBase(Event):
530 """Base class for messages"""
531
532 def __init__(self, msg):
533 self._message = msg
534 Event.__init__(self)
535
536class MsgDebug(MsgBase):
537 """Debug Message"""
538
539class MsgNote(MsgBase):
540 """Note Message"""
541
542class MsgWarn(MsgBase):
543 """Warning Message"""
544
545class MsgError(MsgBase):
546 """Error Message"""
547
548class MsgFatal(MsgBase):
549 """Fatal Message"""
550
551class MsgPlain(MsgBase):
552 """General output"""
553
554class LogExecTTY(Event):
555 """Send event containing program to spawn on tty of the logger"""
556 def __init__(self, msg, prog, sleep_delay, retries):
557 Event.__init__(self)
558 self.msg = msg
559 self.prog = prog
560 self.sleep_delay = sleep_delay
561 self.retries = retries
562
563class LogHandler(logging.Handler):
564 """Dispatch logging messages as bitbake events"""
565
566 def emit(self, record):
567 if record.exc_info:
568 etype, value, tb = record.exc_info
569 if hasattr(tb, 'tb_next'):
570 tb = list(bb.exceptions.extract_traceback(tb, context=3))
571 record.bb_exc_info = (etype, value, tb)
572 record.exc_info = None
573 fire(record, None)
574
575 def filter(self, record):
576 record.taskpid = worker_pid
577 return True
578
579class RequestPackageInfo(Event):
580 """
581 Event to request package information
582 """
583
584class PackageInfo(Event):
585 """
586 Package information for GUI
587 """
588 def __init__(self, pkginfolist):
589 Event.__init__(self)
590 self._pkginfolist = pkginfolist
591
592class MetadataEvent(Event):
593 """
593 Generic event intended for OE-Core classes
594 to report information during asynchronous execution
596 """
597 def __init__(self, eventtype, eventdata):
598 Event.__init__(self)
599 self.type = eventtype
600 self.data = eventdata
601
602class SanityCheck(Event):
603 """
604 Event to run sanity checks, either raising errors or generating events as return status.
605 """
606 def __init__(self, generateevents = True):
607 Event.__init__(self)
608 self.generateevents = generateevents
609
610class SanityCheckPassed(Event):
611 """
612 Event to indicate the sanity check has passed
613 """
614
615class SanityCheckFailed(Event):
616 """
617 Event to indicate sanity check has failed
618 """
619 def __init__(self, msg, network_error=False):
620 Event.__init__(self)
621 self._msg = msg
622 self._network_error = network_error
623
624class NetworkTest(Event):
625 """
626 Event to run network connectivity tests, either raising errors or generating events as return status.
627 """
628 def __init__(self, generateevents = True):
629 Event.__init__(self)
630 self.generateevents = generateevents
631
632class NetworkTestPassed(Event):
633 """
634 Event to indicate network test has passed
635 """
636
637class NetworkTestFailed(Event):
638 """
639 Event to indicate network test has failed
640 """
641
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
new file mode 100644
index 0000000000..f182c8fd62
--- /dev/null
+++ b/bitbake/lib/bb/exceptions.py
@@ -0,0 +1,91 @@
1from __future__ import absolute_import
2import inspect
3import traceback
4import bb.namedtuple_with_abc
5from collections import namedtuple
6
7
8class TracebackEntry(namedtuple.abc):
9 """Pickleable representation of a traceback entry"""
10 _fields = 'filename lineno function args code_context index'
11 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
12
13 def format(self, formatter=None):
14 if not self.code_context:
15 return self._header.format(self) + '\n'
16
17 formatted = [self._header.format(self) + ':\n']
18
19 for lineindex, line in enumerate(self.code_context):
20 if formatter:
21 line = formatter(line)
22
23 if lineindex == self.index:
24 formatted.append(' >%s' % line)
25 else:
26 formatted.append(' %s' % line)
27 return formatted
28
29 def __str__(self):
30 return ''.join(self.format())
31
32def _get_frame_args(frame):
33 """Get the formatted arguments and class (if available) for a frame"""
34 arginfo = inspect.getargvalues(frame)
35
36 try:
37 if not arginfo.args:
38 return '', None
39 # There have been reports from the field of Python 2.6, which doesn't
40 # return a namedtuple here but simply a tuple, so fall back gracefully if
41 # args isn't present.
42 except AttributeError:
43 return '', None
44
45 firstarg = arginfo.args[0]
46 if firstarg == 'self':
47 self = arginfo.locals['self']
48 cls = self.__class__.__name__
49
50 arginfo.args.pop(0)
51 del arginfo.locals['self']
52 else:
53 cls = None
54
55 formatted = inspect.formatargvalues(*arginfo)
56 return formatted, cls
57
58def extract_traceback(tb, context=1):
59 frames = inspect.getinnerframes(tb, context)
60 for frame, filename, lineno, function, code_context, index in frames:
61 formatted_args, cls = _get_frame_args(frame)
62 if cls:
63 function = '%s.%s' % (cls, function)
64 yield TracebackEntry(filename, lineno, function, formatted_args,
65 code_context, index)
66
67def format_extracted(extracted, formatter=None, limit=None):
68 if limit:
69 extracted = extracted[-limit:]
70
71 formatted = []
72 for tracebackinfo in extracted:
73 formatted.extend(tracebackinfo.format(formatter))
74 return formatted
75
76
77def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
78 formatted = ['Traceback (most recent call last):\n']
79
80 if hasattr(tb, 'tb_next'):
81 tb = extract_traceback(tb, context)
82
83 formatted.extend(format_extracted(tb, formatter, limit))
84 formatted.extend(traceback.format_exception_only(etype, value))
85 return formatted
86
87def to_string(exc):
88 if isinstance(exc, SystemExit):
89 if not isinstance(exc.code, basestring):
90 return 'Exited with "%d"' % exc.code
91 return str(exc)
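# Editor's illustrative sketch, not part of the original patch: formatting a
# caught exception with the helpers above; the ValueError is just a stand-in.
def _example_format_current_exception():
    import sys
    try:
        raise ValueError("example failure")
    except ValueError:
        etype, value, tb = sys.exc_info()
        # format_exception() returns a list of lines, like the traceback module
        return ''.join(format_exception(etype, value, tb, context=3))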
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..5a03a0e46e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1575 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
43__version__ = "2"
44_checksum_cache = bb.checksum.FileChecksumCache()
45
46logger = logging.getLogger("BitBake.Fetcher")
47
48class BBFetchException(Exception):
49 """Class all fetch exceptions inherit from"""
50 def __init__(self, message):
51 self.msg = message
52 Exception.__init__(self, message)
53
54 def __str__(self):
55 return self.msg
56
57class MalformedUrl(BBFetchException):
58 """Exception raised when encountering an invalid url"""
59 def __init__(self, url):
60 msg = "The URL: '%s' is invalid and cannot be interpreted" % url
61 self.url = url
62 BBFetchException.__init__(self, msg)
63 self.args = (url,)
64
65class FetchError(BBFetchException):
66 """General fetcher exception when something happens incorrectly"""
67 def __init__(self, message, url = None):
68 if url:
69 msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
70 else:
71 msg = "Fetcher failure: %s" % message
72 self.url = url
73 BBFetchException.__init__(self, msg)
74 self.args = (message, url)
75
76class ChecksumError(FetchError):
77 """Exception when mismatched checksum encountered"""
78 def __init__(self, message, url = None, checksum = None):
79 self.checksum = checksum
80 FetchError.__init__(self, message, url)
81
82class NoChecksumError(FetchError):
83 """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
84
85class UnpackError(BBFetchException):
86 """General fetcher exception when something happens incorrectly when unpacking"""
87 def __init__(self, message, url):
88 msg = "Unpack failure for URL: '%s'. %s" % (url, message)
89 self.url = url
90 BBFetchException.__init__(self, msg)
91 self.args = (message, url)
92
93class NoMethodError(BBFetchException):
94 """Exception raised when there is no method to obtain a supplied url or set of urls"""
95 def __init__(self, url):
96 msg = "Could not find a fetcher which supports the URL: '%s'" % url
97 self.url = url
98 BBFetchException.__init__(self, msg)
99 self.args = (url,)
100
101class MissingParameterError(BBFetchException):
102 """Exception raised when a fetch method is missing a critical parameter in the url"""
103 def __init__(self, missing, url):
104 msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
105 self.url = url
106 self.missing = missing
107 BBFetchException.__init__(self, msg)
108 self.args = (missing, url)
109
110class ParameterError(BBFetchException):
111 """Exception raised when a url cannot be proccessed due to invalid parameters."""
112 def __init__(self, message, url):
113 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
114 self.url = url
115 BBFetchException.__init__(self, msg)
116 self.args = (message, url)
117
118class NetworkAccess(BBFetchException):
119 """Exception raised when network access is disabled but it is required."""
120 def __init__(self, url, cmd):
121 msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
122 self.url = url
123 self.cmd = cmd
124 BBFetchException.__init__(self, msg)
125 self.args = (url, cmd)
126
127class NonLocalMethod(Exception):
128 def __init__(self):
129 Exception.__init__(self)
130
131
132class URI(object):
133 """
134 A class representing a generic URI, with methods for
135 accessing the URI components, and stringifies to the
136 URI.
137
138 It is constructed by calling it with a URI, or setting
139 the attributes manually:
140
141 uri = URI("http://example.com/")
142
143 uri = URI()
144 uri.scheme = 'http'
145 uri.hostname = 'example.com'
146 uri.path = '/'
147
148 It has the following attributes:
149
150 * scheme (read/write)
151 * userinfo (authentication information) (read/write)
152 * username (read/write)
153 * password (read/write)
154
155 Note, password is deprecated as of RFC 3986.
156
157 * hostname (read/write)
158 * port (read/write)
159 * hostport (read only)
160 "hostname:port", if both are set, otherwise just "hostname"
161 * path (read/write)
162 * path_quoted (read/write)
163 A URI quoted version of path
164 * params (dict) (read/write)
165 * query (dict) (read/write)
166 * relative (bool) (read only)
167 True if this is a "relative URI", (e.g. file:foo.diff)
168
169 It stringifies to the URI itself.
170
171 Some notes about relative URIs: while it's specified that
172 a URI beginning with <scheme>:// should either be directly
173 followed by a hostname or a /, the old URI handling of the
174 fetch2 library did not conform to this. Therefore, this URI
175 class has some kludges to make sure that URIs are parsed in
176 a way conforming to bitbake's current usage. This URI class
177 supports the following:
178
179 file:relative/path.diff (IETF compliant)
180 git:relative/path.git (IETF compliant)
181 git:///absolute/path.git (IETF compliant)
182 file:///absolute/path.diff (IETF compliant)
183
184 file://relative/path.diff (not IETF compliant)
185
186 But it does not support the following:
187
188 file://hostname/absolute/path.diff (would be IETF compliant)
189
190 Note that the last case only applies to a list of
191 "whitelisted" schemes (currently only file://), that requires
192 its URIs to not have a network location.
193 """
194
195 _relative_schemes = ['file', 'git']
196 _netloc_forbidden = ['file']
197
198 def __init__(self, uri=None):
199 self.scheme = ''
200 self.userinfo = ''
201 self.hostname = ''
202 self.port = None
203 self._path = ''
204 self.params = {}
205 self.query = {}
206 self.relative = False
207
208 if not uri:
209 return
210
211 # We hijack the URL parameters, since the way bitbake uses
212 # them are not quite RFC compliant.
213 uri, param_str = (uri.split(";", 1) + [None])[:2]
214
215 urlp = urlparse.urlparse(uri)
216 self.scheme = urlp.scheme
217
218 reparse = 0
219
220 # Coerce urlparse to make URI scheme use netloc
221 if self.scheme not in urlparse.uses_netloc:
222 urlparse.uses_params.append(self.scheme)
223 reparse = 1
224
225 # Make urlparse happy(/ier) by converting local resources
226 # to RFC compliant URL format. E.g.:
227 # file://foo.diff -> file:foo.diff
228 if urlp.scheme in self._netloc_forbidden:
229 uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
230 reparse = 1
231
232 if reparse:
233 urlp = urlparse.urlparse(uri)
234
235 # Identify if the URI is relative or not
236 if urlp.scheme in self._relative_schemes and \
237 re.compile("^\w+:(?!//)").match(uri):
238 self.relative = True
239
240 if not self.relative:
241 self.hostname = urlp.hostname or ''
242 self.port = urlp.port
243
244 self.userinfo += urlp.username or ''
245
246 if urlp.password:
247 self.userinfo += ':%s' % urlp.password
248
249 self.path = urllib.unquote(urlp.path)
250
251 if param_str:
252 self.params = self._param_str_split(param_str, ";")
253 if urlp.query:
254 self.query = self._param_str_split(urlp.query, "&")
255
256 def __str__(self):
257 userinfo = self.userinfo
258 if userinfo:
259 userinfo += '@'
260
261 return "%s:%s%s%s%s%s%s" % (
262 self.scheme,
263 '' if self.relative else '//',
264 userinfo,
265 self.hostport,
266 self.path_quoted,
267 self._query_str(),
268 self._param_str())
269
270 def _param_str(self):
271 return (
272 ''.join([';', self._param_str_join(self.params, ";")])
273 if self.params else '')
274
275 def _query_str(self):
276 return (
277 ''.join(['?', self._param_str_join(self.query, "&")])
278 if self.query else '')
279
280 def _param_str_split(self, string, elmdelim, kvdelim="="):
281 ret = {}
282 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
283 ret[k] = v
284 return ret
285
286 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
287 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
288
289 @property
290 def hostport(self):
291 if not self.port:
292 return self.hostname
293 return "%s:%d" % (self.hostname, self.port)
294
295 @property
296 def path_quoted(self):
297 return urllib.quote(self.path)
298
299 @path_quoted.setter
300 def path_quoted(self, path):
301 self.path = urllib.unquote(path)
302
303 @property
304 def path(self):
305 return self._path
306
307 @path.setter
308 def path(self, path):
309 self._path = path
310
311 if re.compile("^/").match(path):
312 self.relative = False
313 else:
314 self.relative = True
315
316 @property
317 def username(self):
318 if self.userinfo:
319 return (self.userinfo.split(":", 1))[0]
320 return ''
321
322 @username.setter
323 def username(self, username):
324 password = self.password
325 self.userinfo = username
326 if password:
327 self.userinfo += ":%s" % password
328
329 @property
330 def password(self):
331 if self.userinfo and ":" in self.userinfo:
332 return (self.userinfo.split(":", 1))[1]
333 return ''
334
335 @password.setter
336 def password(self, password):
337 self.userinfo = "%s:%s" % (self.username, password)
338
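# Editor's illustrative sketch, not part of the original patch: building and
# tweaking a URI; the url below is a made-up example.
def _example_uri_usage():
    u = URI("git://git.example.com/repo.git;protocol=https")
    u.params["branch"] = "master"   # url parameters are exposed as a plain dict
    u.username = "anonymous"        # stored in userinfo, rendered as user@host
    return str(u)                   # stringifies back to a single url string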
339def decodeurl(url):
340 """Decodes an URL into the tokens (scheme, network location, path,
341 user, password, parameters).
342 """
343
344 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
345 if not m:
346 raise MalformedUrl(url)
347
348 type = m.group('type')
349 location = m.group('location')
350 if not location:
351 raise MalformedUrl(url)
352 user = m.group('user')
353 parm = m.group('parm')
354
355 locidx = location.find('/')
356 if locidx != -1 and type.lower() != 'file':
357 host = location[:locidx]
358 path = location[locidx:]
359 else:
360 host = ""
361 path = location
362 if user:
363 m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
364 if m:
365 user = m.group('user')
366 pswd = m.group('pswd')
367 else:
368 user = ''
369 pswd = ''
370
371 p = {}
372 if parm:
373 for s in parm.split(';'):
374 s1, s2 = s.split('=')
375 p[s1] = s2
376
377 return type, host, urllib.unquote(path), user, pswd, p
378
379def encodeurl(decoded):
380 """Encodes a URL from tokens (scheme, network location, path,
381 user, password, parameters).
382 """
383
384 type, host, path, user, pswd, p = decoded
385
386 if not path:
387 raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
388 if not type:
389 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
390 url = '%s://' % type
391 if user and type != "file":
392 url += "%s" % user
393 if pswd:
394 url += ":%s" % pswd
395 url += "@"
396 if host and type != "file":
397 url += "%s" % host
398 # Standardise path to ensure comparisons work
399 while '//' in path:
400 path = path.replace("//", "/")
401 url += "%s" % urllib.quote(path)
402 if p:
403 for parm in p:
404 url += ";%s=%s" % (parm, p[parm])
405
406 return url
407
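# Editor's illustrative sketch, not part of the original patch: a
# decodeurl()/encodeurl() round trip on a made-up url.
def _example_url_roundtrip():
    url = "http://anonymous@example.com/releases/foo-1.0.tar.gz;name=foo"
    scheme, host, path, user, pswd, parm = decodeurl(url)
    # scheme == "http", host == "example.com", path == "/releases/foo-1.0.tar.gz",
    # user == "anonymous", parm == {"name": "foo"}
    return encodeurl((scheme, host, path, user, pswd, parm)) == url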
408def uri_replace(ud, uri_find, uri_replace, replacements, d):
409 if not ud.url or not uri_find or not uri_replace:
410 logger.error("uri_replace: passed an undefined value, not replacing")
411 return None
412 uri_decoded = list(decodeurl(ud.url))
413 uri_find_decoded = list(decodeurl(uri_find))
414 uri_replace_decoded = list(decodeurl(uri_replace))
415 logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
416 result_decoded = ['', '', '', '', '', {}]
417 for loc, i in enumerate(uri_find_decoded):
418 result_decoded[loc] = uri_decoded[loc]
419 regexp = i
420 if loc == 0 and regexp and not regexp.endswith("$"):
421 # Leaving the type unanchored can mean "https" matching "file" can become "files"
422 # which is clearly undesirable.
423 regexp += "$"
424 if loc == 5:
425 # Handle URL parameters
426 if i:
427 # Any specified URL parameters must match
428 for k in uri_replace_decoded[loc]:
429 if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
430 return None
431 # Overwrite any specified replacement parameters
432 for k in uri_replace_decoded[loc]:
433 for l in replacements:
434 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
435 result_decoded[loc][k] = uri_replace_decoded[loc][k]
436 elif (re.match(regexp, uri_decoded[loc])):
437 if not uri_replace_decoded[loc]:
438 result_decoded[loc] = ""
439 else:
440 for k in replacements:
441 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
442 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
443 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
444 if loc == 2:
445 # Handle path manipulations
446 basename = None
447 if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
448 # If the source and destination url types differ, must be a mirrortarball mapping
449 basename = os.path.basename(ud.mirrortarball)
450 # Kill parameters, they make no sense for mirror tarballs
451 uri_decoded[5] = {}
452 elif ud.localpath and ud.method.supports_checksum(ud):
453 basename = os.path.basename(ud.localpath)
454 if basename and not result_decoded[loc].endswith(basename):
455 result_decoded[loc] = os.path.join(result_decoded[loc], basename)
456 else:
457 return None
458 result = encodeurl(result_decoded)
459 if result == ud.url:
460 return None
461 logger.debug(2, "For url %s returning %s" % (ud.url, result))
462 return result
463
464methods = []
465urldata_cache = {}
466saved_headrevs = {}
467
468def fetcher_init(d):
469 """
470 Called to initialize the fetchers once the configuration data is known.
471 Calls before this must not hit the cache.
472 """
473 # When to drop SCM head revisions controlled by user policy
474 srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
475 if srcrev_policy == "cache":
476 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
477 elif srcrev_policy == "clear":
478 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
479 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
480 try:
481 bb.fetch2.saved_headrevs = revs.items()
482 except:
483 pass
484 revs.clear()
485 else:
486 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
487
488 _checksum_cache.init_cache(d)
489
490 for m in methods:
491 if hasattr(m, "init"):
492 m.init(d)
493
494def fetcher_parse_save(d):
495 _checksum_cache.save_extras(d)
496
497def fetcher_parse_done(d):
498 _checksum_cache.save_merge(d)
499
500def fetcher_compare_revisions(d):
501 """
502 Compare the revisions in the persistent cache with current values and
503 return true/false on whether they've changed.
504 """
505
506 data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
507 data2 = bb.fetch2.saved_headrevs
508
509 changed = False
510 for key in data:
511 if key not in data2 or data2[key] != data[key]:
512 logger.debug(1, "%s changed", key)
513 changed = True
514 return True
515 else:
516 logger.debug(2, "%s did not change", key)
517 return False
518
519def mirror_from_string(data):
520 return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
521
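# Editor's illustrative sketch, not part of the original patch: PREMIRRORS and
# MIRRORS values are newline-separated "<find regex> <replacement>" pairs, so
# mirror_from_string() turns them into a list of two-element lists. The mirror
# urls below are made up.
def _example_mirror_from_string():
    value = ("git://.*/.* http://downloads.example.com/mirror/ \\n"
             "http://.*/.* file:///srv/local-mirror/ \\n")
    # -> [['git://.*/.*', 'http://downloads.example.com/mirror/'],
    #     ['http://.*/.*', 'file:///srv/local-mirror/']]
    return mirror_from_string(value)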
522def verify_checksum(ud, d):
523 """
524 Verify the MD5 and SHA256 checksums for the downloaded source
525
526 Raises a FetchError if one or both of the SRC_URI checksums do not match
527 the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
528 checksums specified.
529
530 """
531
532 if not ud.method.supports_checksum(ud):
533 return
534
535 md5data = bb.utils.md5_file(ud.localpath)
536 sha256data = bb.utils.sha256_file(ud.localpath)
537
538 if ud.method.recommends_checksum(ud):
539 # If strict checking enabled and neither sum defined, raise error
540 strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
541 if strict and not (ud.md5_expected or ud.sha256_expected):
542 logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
543 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
544 (ud.localpath, ud.md5_name, md5data,
545 ud.sha256_name, sha256data))
546 raise NoChecksumError('Missing SRC_URI checksum', ud.url)
547
548 # Log missing sums so user can more easily add them
549 if not ud.md5_expected:
550 logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
551 'SRC_URI[%s] = "%s"',
552 ud.localpath, ud.md5_name, md5data)
553
554 if not ud.sha256_expected:
555 logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
556 'SRC_URI[%s] = "%s"',
557 ud.localpath, ud.sha256_name, sha256data)
558
559 md5mismatch = False
560 sha256mismatch = False
561
562 if ud.md5_expected != md5data:
563 md5mismatch = True
564
565 if ud.sha256_expected != sha256data:
566 sha256mismatch = True
567
568 # We want to alert the user if a checksum is defined in the recipe but
569 # it does not match.
570 msg = ""
571 mismatch = False
572 if md5mismatch and ud.md5_expected:
573 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
574 mismatch = True
575
576 if sha256mismatch and ud.sha256_expected:
577 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
578 mismatch = True
579
580 if mismatch:
581 msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
582
583 if len(msg):
584 raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
585
586
587def update_stamp(ud, d):
588 """
589 donestamp is a stamp file indicating the whole fetch is done;
590 this function updates the stamp after verifying the checksum
591 """
592 if os.path.exists(ud.donestamp):
593 # Touch the done stamp file to show active use of the download
594 try:
595 os.utime(ud.donestamp, None)
596 except:
597 # Errors aren't fatal here
598 pass
599 else:
600 verify_checksum(ud, d)
601 open(ud.donestamp, 'w').close()
602
603def subprocess_setup():
604 # Python installs a SIGPIPE handler by default. This is usually not what
605 # non-Python subprocesses expect.
606 # SIGPIPE errors are known issues with gzip/bash
607 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
608
609def get_autorev(d):
610 # Don't cache the srcrev in the AUTOREV case unless the cache policy allows it
611 if d.getVar('BB_SRCREV_POLICY', True) != "cache":
612 d.setVar('__BB_DONT_CACHE', '1')
613 return "AUTOINC"
614
615def get_srcrev(d):
616 """
617 Return the version string for the current package
618 (usually to be used as PV)
619 Most packages usually only have one SCM so we just pass on the call.
620 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
621 have been set.
622 """
623
624 scms = []
625 fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
626 urldata = fetcher.ud
627 for u in urldata:
628 if urldata[u].method.supports_srcrev():
629 scms.append(u)
630
631 if len(scms) == 0:
632 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
633
634 if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
635 autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
636 if len(rev) > 10:
637 rev = rev[:10]
638 if autoinc:
639 return "AUTOINC+" + rev
640 return rev
641
642 #
643 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
644 #
645 format = d.getVar('SRCREV_FORMAT', True)
646 if not format:
647 raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
648
649 seenautoinc = False
650 for scm in scms:
651 ud = urldata[scm]
652 for name in ud.names:
653 autoinc, rev = ud.method.sortable_revision(ud, d, name)
654 seenautoinc = seenautoinc or autoinc
655 if len(rev) > 10:
656 rev = rev[:10]
657 format = format.replace(name, rev)
658 if seenautoinc:
659 format = "AUTOINC+" + format
660
661 return format
662
663def localpath(url, d):
664 fetcher = bb.fetch2.Fetch([url], d)
665 return fetcher.localpath(url)
666
667def runfetchcmd(cmd, d, quiet = False, cleanup = []):
668 """
669 Run cmd returning the command output
670 Raise an error if interrupted or cmd fails
671 Optionally echo command output to stdout
672 Optionally remove the files/directories listed in cleanup upon failure
673 """
674
675 # Need to export PATH as binary could be in metadata paths
676 # rather than host provided
677 # Also include some other variables.
678 # FIXME: Should really include all exported variables?
679 exportvars = ['HOME', 'PATH',
680 'HTTP_PROXY', 'http_proxy',
681 'HTTPS_PROXY', 'https_proxy',
682 'FTP_PROXY', 'ftp_proxy',
683 'FTPS_PROXY', 'ftps_proxy',
684 'NO_PROXY', 'no_proxy',
685 'ALL_PROXY', 'all_proxy',
686 'GIT_PROXY_COMMAND',
687 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
688 'SOCKS5_USER', 'SOCKS5_PASSWD']
689
690 for var in exportvars:
691 val = d.getVar(var, True)
692 if val:
693 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
694
695 logger.debug(1, "Running %s", cmd)
696
697 success = False
698 error_message = ""
699
700 try:
701 (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
702 success = True
703 except bb.process.NotFoundError as e:
704 error_message = "Fetch command %s" % (e.command)
705 except bb.process.ExecutionError as e:
706 if e.stdout:
707 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
708 elif e.stderr:
709 output = "output:\n%s" % e.stderr
710 else:
711 output = "no output"
712 error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
713 except bb.process.CmdError as e:
714 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
715 if not success:
716 for f in cleanup:
717 try:
718 bb.utils.remove(f, True)
719 except OSError:
720 pass
721
722 raise FetchError(error_message)
723
724 return output
725
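# Editor's illustrative sketch, not part of the original patch: how a fetcher
# implementation typically calls runfetchcmd(); the command and url are made up.
def _example_runfetchcmd(d):
    # Returns the command's stdout on success, raises FetchError on failure.
    return runfetchcmd("git ls-remote git://git.example.com/repo.git", d, quiet=True)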
726def check_network_access(d, info = "", url = None):
727 """
728 log remote network access, and error if BB_NO_NETWORK is set
729 """
730 if d.getVar("BB_NO_NETWORK", True) == "1":
731 raise NetworkAccess(url, info)
732 else:
733 logger.debug(1, "Fetcher accessed the network with the command %s" % info)
734
735def build_mirroruris(origud, mirrors, ld):
736 uris = []
737 uds = []
738
739 replacements = {}
740 replacements["TYPE"] = origud.type
741 replacements["HOST"] = origud.host
742 replacements["PATH"] = origud.path
743 replacements["BASENAME"] = origud.path.split("/")[-1]
744 replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
745
746 def adduri(ud, uris, uds):
747 for line in mirrors:
748 try:
749 (find, replace) = line
750 except ValueError:
751 continue
752 newuri = uri_replace(ud, find, replace, replacements, ld)
753 if not newuri or newuri in uris or newuri == origud.url:
754 continue
755 try:
756 newud = FetchData(newuri, ld)
757 newud.setup_localpath(ld)
758 except bb.fetch2.BBFetchException as e:
759 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
760 logger.debug(1, str(e))
761 try:
762 ud.method.clean(ud, ld)
763 except UnboundLocalError:
764 pass
765 continue
766 uris.append(newuri)
767 uds.append(newud)
768
769 adduri(newud, uris, uds)
770
771 adduri(origud, uris, uds)
772
773 return uris, uds
774
775def rename_bad_checksum(ud, suffix):
776 """
777 Renames files to have suffix from parameter
778 """
779
780 if ud.localpath is None:
781 return
782
783 new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
784 bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
785 bb.utils.movefile(ud.localpath, new_localpath)
786
787
788def try_mirror_url(origud, ud, ld, check = False):
789 # Return of None or a value means we're finished
790 # False means try another url
791 try:
792 if check:
793 found = ud.method.checkstatus(ud, ld)
794 if found:
795 return found
796 return False
797
798 os.chdir(ld.getVar("DL_DIR", True))
799
800 if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
801 ud.method.download(ud, ld)
802 if hasattr(ud.method,"build_mirror_data"):
803 ud.method.build_mirror_data(ud, ld)
804
805 if not ud.localpath or not os.path.exists(ud.localpath):
806 return False
807
808 if ud.localpath == origud.localpath:
809 return ud.localpath
810
811 # We may be obtaining a mirror tarball which needs further processing by the real fetcher
812 # If that tarball is a local file:// we need to provide a symlink to it
813 dldir = ld.getVar("DL_DIR", True)
814 if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
815 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
816 bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
817 open(ud.donestamp, 'w').close()
818 dest = os.path.join(dldir, os.path.basename(ud.localpath))
819 if not os.path.exists(dest):
820 os.symlink(ud.localpath, dest)
821 if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
822 origud.method.download(origud, ld)
823 if hasattr(origud.method,"build_mirror_data"):
824 origud.method.build_mirror_data(origud, ld)
825 return ud.localpath
826 # Otherwise the result is a local file:// and we symlink to it
827 if not os.path.exists(origud.localpath):
828 if os.path.islink(origud.localpath):
829 # Broken symbolic link
830 os.unlink(origud.localpath)
831
832 os.symlink(ud.localpath, origud.localpath)
833 update_stamp(origud, ld)
834 return ud.localpath
835
836 except bb.fetch2.NetworkAccess:
837 raise
838
839 except bb.fetch2.BBFetchException as e:
840 if isinstance(e, ChecksumError):
841 logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
842 logger.warn(str(e))
843 rename_bad_checksum(ud, e.checksum)
844 elif isinstance(e, NoChecksumError):
845 raise
846 else:
847 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
848 logger.debug(1, str(e))
849 try:
850 ud.method.clean(ud, ld)
851 except UnboundLocalError:
852 pass
853 return False
854
855def try_mirrors(d, origud, mirrors, check = False):
856 """
857 Try to use a mirrored version of the sources.
858 This method will be automatically called before the fetchers go.
859
860 d is a bb.data instance
861 origud is the FetchData for the original uri we're trying to download
862 mirrors is the list of mirrors we're going to try
863 """
864 ld = d.createCopy()
865
866 uris, uds = build_mirroruris(origud, mirrors, ld)
867
868 for index, uri in enumerate(uris):
869 ret = try_mirror_url(origud, uds[index], ld, check)
870 if ret != False:
871 return ret
872 return None
873
874def srcrev_internal_helper(ud, d, name):
875 """
876 Return:
877 a) a source revision if specified
878 b) latest revision if SRCREV="AUTOINC"
879 c) None if not specified
880 """
881
882 srcrev = None
883 pn = d.getVar("PN", True)
884 attempts = []
885 if name != '' and pn:
886 attempts.append("SRCREV_%s_pn-%s" % (name, pn))
887 if name != '':
888 attempts.append("SRCREV_%s" % name)
889 if pn:
890 attempts.append("SRCREV_pn-%s" % pn)
891 attempts.append("SRCREV")
892
893 for a in attempts:
894 srcrev = d.getVar(a, True)
895 if srcrev and srcrev != "INVALID":
896 break
897
898 if 'rev' in ud.parm and 'tag' in ud.parm:
899 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
900
901 if 'rev' in ud.parm or 'tag' in ud.parm:
902 if 'rev' in ud.parm:
903 parmrev = ud.parm['rev']
904 else:
905 parmrev = ud.parm['tag']
906 if srcrev == "INVALID" or not srcrev:
907 return parmrev
908 if srcrev != parmrev:
909 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
910 return parmrev
911
912 if srcrev == "INVALID" or not srcrev:
913 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
914 if srcrev == "AUTOINC":
915 srcrev = ud.method.latest_revision(ud, d, name)
916
917 return srcrev
918
919def get_checksum_file_list(d):
920 """ Get a list of files checksum in SRC_URI
921
922 Returns the resolved local paths of all local file entries in
923 SRC_URI as a space-separated string
924 """
925 fetch = Fetch([], d, cache = False, localonly = True)
926
927 dl_dir = d.getVar('DL_DIR', True)
928 filelist = []
929 for u in fetch.urls:
930 ud = fetch.ud[u]
931
932 if ud and isinstance(ud.method, local.Local):
933 ud.setup_localpath(d)
934 f = ud.localpath
935 pth = ud.decodedurl
936 if '*' in pth:
937 f = os.path.join(os.path.abspath(f), pth)
938 if f.startswith(dl_dir):
939 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
940 if os.path.exists(f):
941 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
942 else:
943 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
944 filelist.append(f)
945
946 return " ".join(filelist)
947
948
949def get_file_checksums(filelist, pn):
950 """Get a list of the checksums for a list of local files
951
952 Returns the checksums for a list of local files, caching the results as
953 it proceeds
954
955 """
956
957 def checksum_file(f):
958 try:
959 checksum = _checksum_cache.get_checksum(f)
960 except OSError as e:
961 bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
962 return None
963 return checksum
964
965 def checksum_dir(pth):
966 # Handle directories recursively
967 dirchecksums = []
968 for root, dirs, files in os.walk(pth):
969 for name in files:
970 fullpth = os.path.join(root, name)
971 checksum = checksum_file(fullpth)
972 if checksum:
973 dirchecksums.append((fullpth, checksum))
974 return dirchecksums
975
976 checksums = []
977 for pth in filelist.split():
978 checksum = None
979 if '*' in pth:
980 # Handle globs
981 for f in glob.glob(pth):
982 if os.path.isdir(f):
983 checksums.extend(checksum_dir(f))
984 else:
985 checksum = checksum_file(f)
986 if checksum:
987 checksums.append((f, checksum))
988 continue
989 elif os.path.isdir(pth):
990 checksums.extend(checksum_dir(pth))
991 continue
992 else:
993 checksum = checksum_file(pth)
994
995 if checksum:
996 checksums.append((pth, checksum))
997
998 checksums.sort(key=operator.itemgetter(1))
999 return checksums
1000
1001
1002class FetchData(object):
1003 """
1004 A class which represents the fetcher state for a given URI.
1005 """
1006 def __init__(self, url, d, localonly = False):
1007 # localpath is the location of a downloaded result. If not set, the file is local.
1008 self.donestamp = None
1009 self.localfile = ""
1010 self.localpath = None
1011 self.lockfile = None
1012 self.mirrortarball = None
1013 self.basename = None
1014 self.basepath = None
1015 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
1016 self.date = self.getSRCDate(d)
1017 self.url = url
1018 if not self.user and "user" in self.parm:
1019 self.user = self.parm["user"]
1020 if not self.pswd and "pswd" in self.parm:
1021 self.pswd = self.parm["pswd"]
1022 self.setup = False
1023
1024 if "name" in self.parm:
1025 self.md5_name = "%s.md5sum" % self.parm["name"]
1026 self.sha256_name = "%s.sha256sum" % self.parm["name"]
1027 else:
1028 self.md5_name = "md5sum"
1029 self.sha256_name = "sha256sum"
1030 if self.md5_name in self.parm:
1031 self.md5_expected = self.parm[self.md5_name]
1032 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1033 self.md5_expected = None
1034 else:
1035 self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
1036 if self.sha256_name in self.parm:
1037 self.sha256_expected = self.parm[self.sha256_name]
1038 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1039 self.sha256_expected = None
1040 else:
1041 self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
1042
1043 self.names = self.parm.get("name",'default').split(',')
1044
1045 self.method = None
1046 for m in methods:
1047 if m.supports(self, d):
1048 self.method = m
1049 break
1050
1051 if not self.method:
1052 raise NoMethodError(url)
1053
1054 if localonly and not isinstance(self.method, local.Local):
1055 raise NonLocalMethod()
1056
1057 if self.parm.get("proto", None) and "protocol" not in self.parm:
1058 logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
1059 self.parm["protocol"] = self.parm.get("proto", None)
1060
1061 if hasattr(self.method, "urldata_init"):
1062 self.method.urldata_init(self, d)
1063
1064 if "localpath" in self.parm:
1065 # if user sets localpath for file, use it instead.
1066 self.localpath = self.parm["localpath"]
1067 self.basename = os.path.basename(self.localpath)
1068 elif self.localfile:
1069 self.localpath = self.method.localpath(self, d)
1070
1071 dldir = d.getVar("DL_DIR", True)
1072 # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
1073 if self.localpath and self.localpath.startswith(dldir):
1074 basepath = self.localpath
1075 elif self.localpath:
1076 basepath = dldir + os.sep + os.path.basename(self.localpath)
1077 else:
1078 basepath = dldir + os.sep + (self.basepath or self.basename)
1079 self.donestamp = basepath + '.done'
1080 self.lockfile = basepath + '.lock'
1081
1082 def setup_revisons(self, d):
1083 self.revisions = {}
1084 for name in self.names:
1085 self.revisions[name] = srcrev_internal_helper(self, d, name)
1086
1087 # Add compatibility code for the case where no name is specified
1088 if len(self.names) == 1:
1089 self.revision = self.revisions[self.names[0]]
1090
1091 def setup_localpath(self, d):
1092 if not self.localpath:
1093 self.localpath = self.method.localpath(self, d)
1094
1095 def getSRCDate(self, d):
1096 """
1097 Return the SRC Date for the component
1098
1099 d the bb.data module
1100 """
1101 if "srcdate" in self.parm:
1102 return self.parm['srcdate']
1103
1104 pn = d.getVar("PN", True)
1105
1106 if pn:
1107 return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1108
1109 return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1110
1111class FetchMethod(object):
1112 """Base class for 'fetch'ing data"""
1113
1114 def __init__(self, urls = []):
1115 self.urls = []
1116
1117 def supports(self, urldata, d):
1118 """
1119 Check to see if this fetch class supports a given url.
1120 """
1121 return 0
1122
1123 def localpath(self, urldata, d):
1124 """
1125 Return the local filename of a given url assuming a successful fetch.
1126 Can also setup variables in urldata for use in go (saving code duplication
1127 and duplicate code execution)
1128 """
1129 return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
1130
1131 def supports_checksum(self, urldata):
1132 """
1133 Is localpath something that can be represented by a checksum?
1134 """
1135
1136 # We cannot compute checksums for directories
1137 if os.path.isdir(urldata.localpath):
1138 return False
1139 if urldata.localpath.find("*") != -1:
1140 return False
1141
1142 return True
1143
1144 def recommends_checksum(self, urldata):
1145 """
1146 Is this a backend for which checksumming is recommended (should warnings
1147 be displayed if there is no checksum)?
1148 """
1149 return False
1150
1151 def _strip_leading_slashes(self, relpath):
1152 """
1153 Remove leading slash as os.path.join can't cope
1154 """
1155 while os.path.isabs(relpath):
1156 relpath = relpath[1:]
1157 return relpath
1158
1159 def setUrls(self, urls):
1160 self.__urls = urls
1161
1162 def getUrls(self):
1163 return self.__urls
1164
1165 urls = property(getUrls, setUrls, None, "Urls property")
1166
1167 def need_update(self, ud, d):
1168 """
1169 Force a fetch, even if localpath exists?
1170 """
1171 if os.path.exists(ud.localpath):
1172 return False
1173 return True
1174
1175 def supports_srcrev(self):
1176 """
1177 The fetcher supports auto source revisions (SRCREV)
1178 """
1179 return False
1180
1181 def download(self, urldata, d):
1182 """
1183 Fetch urls
1184 Assumes localpath was called first
1185 """
1186 raise NoMethodError(urldata.url)
1187
1188 def unpack(self, urldata, rootdir, data):
1189 iterate = False
1190 file = urldata.localpath
1191
1192 try:
1193 unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1194 except ValueError as exc:
1195 bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1196 (file, urldata.parm.get('unpack')))
1197
1198 dots = file.split(".")
1199 if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
1200 efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
1201 else:
1202 efile = file
1203 cmd = None
1204
1205 if unpack:
1206 if file.endswith('.tar'):
1207 cmd = 'tar x --no-same-owner -f %s' % file
1208 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1209 cmd = 'tar xz --no-same-owner -f %s' % file
1210 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1211 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
1212 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1213 cmd = 'gzip -dc %s > %s' % (file, efile)
1214 elif file.endswith('.bz2'):
1215 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1216 elif file.endswith('.tar.xz'):
1217 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
1218 elif file.endswith('.xz'):
1219 cmd = 'xz -dc %s > %s' % (file, efile)
1220 elif file.endswith('.zip') or file.endswith('.jar'):
1221 try:
1222 dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1223 except ValueError as exc:
1224 bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1225 (file, urldata.parm.get('dos')))
1226 cmd = 'unzip -q -o'
1227 if dos:
1228 cmd = '%s -a' % cmd
1229 cmd = "%s '%s'" % (cmd, file)
1230 elif file.endswith('.rpm') or file.endswith('.srpm'):
1231 if 'extract' in urldata.parm:
1232 unpack_file = urldata.parm.get('extract')
1233 cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1234 iterate = True
1235 iterate_file = unpack_file
1236 else:
1237 cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1238 elif file.endswith('.deb') or file.endswith('.ipk'):
1239 cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
1240
1241 if not unpack or not cmd:
1242 # If file == dest, then avoid any copies, as we already put the file into dest!
1243 dest = os.path.join(rootdir, os.path.basename(file))
1244 if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1245 if os.path.isdir(file):
1246 # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
1247 basepath = getattr(urldata, "basepath", None)
1248 destdir = "."
1249 if basepath and basepath.endswith("/"):
1250 basepath = basepath.rstrip("/")
1251 elif basepath:
1252 basepath = os.path.dirname(basepath)
1253 if basepath and basepath.find("/") != -1:
1254 destdir = basepath[:basepath.rfind('/')]
1255 destdir = destdir.strip('/')
1256 if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
1257 os.makedirs("%s/%s" % (rootdir, destdir))
1258 cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
1259 #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
1260 else:
1261 # The "destdir" handling was specifically done for FILESPATH
1262 # items. So, only do so for file:// entries.
1263 if urldata.type == "file" and urldata.path.find("/") != -1:
1264 destdir = urldata.path.rsplit("/", 1)[0]
1265 else:
1266 destdir = "."
1267 bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
1268 cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
1269
1270 if not cmd:
1271 return
1272
1273 # Change to subdir before executing command
1274 save_cwd = os.getcwd()
1275 os.chdir(rootdir)
1276 if 'subdir' in urldata.parm:
1277 newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
1278 bb.utils.mkdirhier(newdir)
1279 os.chdir(newdir)
1280
1281 path = data.getVar('PATH', True)
1282 if path:
1283 cmd = "PATH=\"%s\" %s" % (path, cmd)
1284 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
1285 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
1286
1287 os.chdir(save_cwd)
1288
1289 if ret != 0:
1290 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1291
1292 if iterate is True:
1293 iterate_urldata = urldata
1294 iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1295 self.unpack(urldata, rootdir, data)
1296
1297 return
1298
1299 def clean(self, urldata, d):
1300 """
1301 Clean any existing full or partial download
1302 """
1303 bb.utils.remove(urldata.localpath)
1304
1305 def try_premirror(self, urldata, d):
1306 """
1307 Should premirrors be used?
1308 """
1309 return True
1310
1311 def checkstatus(self, urldata, d):
1312 """
1313 Check the status of a URL
1314 Assumes localpath was called first
1315 """
1316 logger.info("URL %s could not be checked for status since no method exists.", url)
1317 return True
1318
1319 def latest_revision(self, ud, d, name):
1320 """
1321 Look in the cache for the latest revision, if not present ask the SCM.
1322 """
1323 if not hasattr(self, "_latest_revision"):
1324 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1325
1326 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1327 key = self.generate_revision_key(ud, d, name)
1328 try:
1329 return revs[key]
1330 except KeyError:
1331 revs[key] = rev = self._latest_revision(ud, d, name)
1332 return rev
1333
1334 def sortable_revision(self, ud, d, name):
1335 latest_rev = self._build_revision(ud, d, name)
1336 return True, str(latest_rev)
1337
1338 def generate_revision_key(self, ud, d, name):
1339 key = self._revision_key(ud, d, name)
1340 return "%s-%s" % (key, d.getVar("PN", True) or "")
1341
1342class Fetch(object):
1343 def __init__(self, urls, d, cache = True, localonly = False):
1344 if localonly and cache:
1345 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1346
1347 if len(urls) == 0:
1348 urls = d.getVar("SRC_URI", True).split()
1349 self.urls = urls
1350 self.d = d
1351 self.ud = {}
1352
1353 fn = d.getVar('FILE', True)
1354 if cache and fn and fn in urldata_cache:
1355 self.ud = urldata_cache[fn]
1356
1357 for url in urls:
1358 if url not in self.ud:
1359 try:
1360 self.ud[url] = FetchData(url, d, localonly)
1361 except NonLocalMethod:
1362 if localonly:
1363 self.ud[url] = None
1364 pass
1365
1366 if fn and cache:
1367 urldata_cache[fn] = self.ud
1368
1369 def localpath(self, url):
1370 if url not in self.urls:
1371 self.ud[url] = FetchData(url, self.d)
1372
1373 self.ud[url].setup_localpath(self.d)
1374 return self.d.expand(self.ud[url].localpath)
1375
1376 def localpaths(self):
1377 """
1378 Return a list of the local filenames, assuming successful fetch
1379 """
1380 local = []
1381
1382 for u in self.urls:
1383 ud = self.ud[u]
1384 ud.setup_localpath(self.d)
1385 local.append(ud.localpath)
1386
1387 return local
1388
1389 def download(self, urls = []):
1390 """
1391 Fetch all urls
1392 """
1393 if len(urls) == 0:
1394 urls = self.urls
1395
1396 network = self.d.getVar("BB_NO_NETWORK", True)
1397 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
1398
1399 for u in urls:
1400 ud = self.ud[u]
1401 ud.setup_localpath(self.d)
1402 m = ud.method
1403 localpath = ""
1404
1405 lf = bb.utils.lockfile(ud.lockfile)
1406
1407 try:
1408 self.d.setVar("BB_NO_NETWORK", network)
1409
1410 if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
1411 localpath = ud.localpath
1412 elif m.try_premirror(ud, self.d):
1413 logger.debug(1, "Trying PREMIRRORS")
1414 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1415 localpath = try_mirrors(self.d, ud, mirrors, False)
1416
1417 if premirroronly:
1418 self.d.setVar("BB_NO_NETWORK", "1")
1419
1420 os.chdir(self.d.getVar("DL_DIR", True))
1421
1422 firsterr = None
1423 if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
1424 try:
1425 logger.debug(1, "Trying Upstream")
1426 m.download(ud, self.d)
1427 if hasattr(m, "build_mirror_data"):
1428 m.build_mirror_data(ud, self.d)
1429 localpath = ud.localpath
1430 # Verify checksums early so that if a checksum mismatches,
1431 # the fetcher still has a chance to fetch from a mirror
1432 update_stamp(ud, self.d)
1433
1434 except bb.fetch2.NetworkAccess:
1435 raise
1436
1437 except BBFetchException as e:
1438 if isinstance(e, ChecksumError):
1439 logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
1440 logger.debug(1, str(e))
1441 rename_bad_checksum(ud, e.checksum)
1442 elif isinstance(e, NoChecksumError):
1443 raise
1444 else:
1445 logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
1446 logger.debug(1, str(e))
1447 firsterr = e
1448 # Remove any incomplete fetch
1449 m.clean(ud, self.d)
1450 logger.debug(1, "Trying MIRRORS")
1451 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1452 localpath = try_mirrors (self.d, ud, mirrors)
1453
1454 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
1455 if firsterr:
1456 logger.error(str(firsterr))
1457 raise FetchError("Unable to fetch URL from any source.", u)
1458
1459 update_stamp(ud, self.d)
1460
1461 except BBFetchException as e:
1462 if isinstance(e, ChecksumError):
1463 logger.error("Checksum failure fetching %s" % u)
1464 raise
1465
1466 finally:
1467 bb.utils.unlockfile(lf)
1468
1469 def checkstatus(self, urls = []):
1470 """
1471 Check all urls exist upstream
1472 """
1473
1474 if len(urls) == 0:
1475 urls = self.urls
1476
1477 for u in urls:
1478 ud = self.ud[u]
1479 ud.setup_localpath(self.d)
1480 m = ud.method
1481 logger.debug(1, "Testing URL %s", u)
1482 # First try checking uri, u, from PREMIRRORS
1483 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1484 ret = try_mirrors(self.d, ud, mirrors, True)
1485 if not ret:
1486 # Next try checking from the original uri, u
1487 try:
1488 ret = m.checkstatus(ud, self.d)
1489 except:
1490 # Finally, try checking uri, u, from MIRRORS
1491 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1492 ret = try_mirrors(self.d, ud, mirrors, True)
1493
1494 if not ret:
1495 raise FetchError("URL %s doesn't work" % u, u)
1496
1497 def unpack(self, root, urls = []):
1498 """
1499        Unpack all urls into the directory given by root
1500 """
1501
1502 if len(urls) == 0:
1503 urls = self.urls
1504
1505 for u in urls:
1506 ud = self.ud[u]
1507 ud.setup_localpath(self.d)
1508
1509            if self.d.expand(ud.localpath) is None:
1510 continue
1511
1512 if ud.lockfile:
1513 lf = bb.utils.lockfile(ud.lockfile)
1514
1515 ud.method.unpack(ud, root, self.d)
1516
1517 if ud.lockfile:
1518 bb.utils.unlockfile(lf)
1519
1520 def clean(self, urls = []):
1521 """
1522 Clean files that the fetcher gets or places
1523 """
1524
1525 if len(urls) == 0:
1526 urls = self.urls
1527
1528 for url in urls:
1529 if url not in self.ud:
1530                self.ud[url] = FetchData(url, self.d)
1531 ud = self.ud[url]
1532 ud.setup_localpath(self.d)
1533
1534 if not ud.localfile and ud.localpath is None:
1535 continue
1536
1537 if ud.lockfile:
1538 lf = bb.utils.lockfile(ud.lockfile)
1539
1540 ud.method.clean(ud, self.d)
1541 if ud.donestamp:
1542 bb.utils.remove(ud.donestamp)
1543
1544 if ud.lockfile:
1545 bb.utils.unlockfile(lf)
1546
1547from . import cvs
1548from . import git
1549from . import gitsm
1550from . import gitannex
1551from . import local
1552from . import svn
1553from . import wget
1554from . import ssh
1555from . import sftp
1556from . import perforce
1557from . import bzr
1558from . import hg
1559from . import osc
1560from . import repo
1561
1562methods.append(local.Local())
1563methods.append(wget.Wget())
1564methods.append(svn.Svn())
1565methods.append(git.Git())
1566methods.append(gitsm.GitSM())
1567methods.append(gitannex.GitANNEX())
1568methods.append(cvs.Cvs())
1569methods.append(ssh.SSH())
1570methods.append(sftp.SFTP())
1571methods.append(perforce.Perforce())
1572methods.append(bzr.Bzr())
1573methods.append(hg.Hg())
1574methods.append(osc.Osc())
1575methods.append(repo.Repo())
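The Fetch class defined earlier in this file is the public entry point to the module: construct it with a list of URLs (or let it fall back to SRC_URI), then call download(), unpack() and localpaths(). A minimal usage sketch follows, assuming a fully initialised BitBake datastore d and a work directory path; the function name and variables are illustrative, not part of this patch:

    import bb.fetch2

    def fetch_and_unpack(d, workdir):
        # Collect the URLs from SRC_URI and build the fetcher
        urls = (d.getVar("SRC_URI", True) or "").split()
        fetcher = bb.fetch2.Fetch(urls, d)

        # Download everything, trying premirrors, then upstream, then mirrors
        fetcher.download()

        # Unpack each fetched artefact into the work directory
        fetcher.unpack(workdir)

        # Return the local files that were produced
        return fetcher.localpaths()

Fetch() already falls back to SRC_URI when given an empty list, so the explicit split above is only for clarity.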
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..03e9ac461b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import runfetchcmd
34from bb.fetch2 import logger
35
36class Bzr(FetchMethod):
37 def supports(self, ud, d):
38 return ud.type in ['bzr']
39
40 def urldata_init(self, ud, d):
41 """
42 init bzr specific variable within url data
43 """
44 # Create paths to bzr checkouts
45 relpath = self._strip_leading_slashes(ud.path)
46 ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
47
48 ud.setup_revisons(d)
49
50 if not ud.revision:
51 ud.revision = self.latest_revision(ud, d)
52
53 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
54
55 def _buildbzrcommand(self, ud, d, command):
56 """
57        Build up a bzr commandline based on ud
58 command is "fetch", "update", "revno"
59 """
60
61 basecmd = data.expand('${FETCHCMD_bzr}', d)
62
63 proto = ud.parm.get('protocol', 'http')
64
65 bzrroot = ud.host + ud.path
66
67 options = []
68
69 if command == "revno":
70 bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
71 else:
72 if ud.revision:
73 options.append("-r %s" % ud.revision)
74
75 if command == "fetch":
76 bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
77 elif command == "update":
78 bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
79 else:
80 raise FetchError("Invalid bzr command %s" % command, ud.url)
81
82 return bzrcmd
83
84 def download(self, ud, d):
85 """Fetch url"""
86
87 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
88 bzrcmd = self._buildbzrcommand(ud, d, "update")
89 logger.debug(1, "BZR Update %s", ud.url)
90 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
91 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
92 runfetchcmd(bzrcmd, d)
93 else:
94 bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
95 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
96 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
97 logger.debug(1, "BZR Checkout %s", ud.url)
98 bb.utils.mkdirhier(ud.pkgdir)
99 os.chdir(ud.pkgdir)
100 logger.debug(1, "Running %s", bzrcmd)
101 runfetchcmd(bzrcmd, d)
102
103 os.chdir(ud.pkgdir)
104
105 scmdata = ud.parm.get("scmdata", "")
106 if scmdata == "keep":
107 tar_flags = ""
108 else:
109 tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
110
111 # tar them up to a defined filename
112 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
113
114 def supports_srcrev(self):
115 return True
116
117 def _revision_key(self, ud, d, name):
118 """
119 Return a unique key for the url
120 """
121 return "bzr:" + ud.pkgdir
122
123 def _latest_revision(self, ud, d, name):
124 """
125 Return the latest upstream revision number
126 """
127 logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
128
129 bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
130
131 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
132
133 return output.strip()
134
135 def sortable_revision(self, ud, d, name):
136 """
137 Return a sortable revision number which in our case is the revision number
138 """
139
140 return False, self._build_revision(ud, d)
141
142 def _build_revision(self, ud, d):
143 return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..d27d96f68c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
33from bb.fetch2 import runfetchcmd
34
35class Cvs(FetchMethod):
36 """
37 Class to fetch a module or modules from cvs repositories
38 """
39 def supports(self, ud, d):
40 """
41 Check to see if a given url can be fetched with cvs.
42 """
43 return ud.type in ['cvs']
44
45 def urldata_init(self, ud, d):
46 if not "module" in ud.parm:
47 raise MissingParameterError("module", ud.url)
48 ud.module = ud.parm["module"]
49
50 ud.tag = ud.parm.get('tag', "")
51
52 # Override the default date in certain cases
53 if 'date' in ud.parm:
54 ud.date = ud.parm['date']
55 elif ud.tag:
56 ud.date = ""
57
58 norecurse = ''
59 if 'norecurse' in ud.parm:
60 norecurse = '_norecurse'
61
62 fullpath = ''
63 if 'fullpath' in ud.parm:
64 fullpath = '_fullpath'
65
66 ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
67
68 def need_update(self, ud, d):
69 if (ud.date == "now"):
70 return True
71 if not os.path.exists(ud.localpath):
72 return True
73 return False
74
75 def download(self, ud, d):
76
77 method = ud.parm.get('method', 'pserver')
78 localdir = ud.parm.get('localdir', ud.module)
79 cvs_port = ud.parm.get('port', '')
80
81 cvs_rsh = None
82 if method == "ext":
83 if "rsh" in ud.parm:
84 cvs_rsh = ud.parm["rsh"]
85
86 if method == "dir":
87 cvsroot = ud.path
88 else:
89 cvsroot = ":" + method
90 cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
91 if cvsproxyhost:
92 cvsroot += ";proxy=" + cvsproxyhost
93 cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
94 if cvsproxyport:
95 cvsroot += ";proxyport=" + cvsproxyport
96 cvsroot += ":" + ud.user
97 if ud.pswd:
98 cvsroot += ":" + ud.pswd
99 cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
100
101 options = []
102 if 'norecurse' in ud.parm:
103 options.append("-l")
104 if ud.date:
105 # treat YYYYMMDDHHMM specially for CVS
106 if len(ud.date) == 12:
107 options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
108 else:
109 options.append("-D \"%s UTC\"" % ud.date)
110 if ud.tag:
111 options.append("-r %s" % ud.tag)
112
113 cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
114 cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
115 cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
116
117 if cvs_rsh:
118 cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
119 cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
120
121 # create module directory
122 logger.debug(2, "Fetch: checking for module directory")
123 pkg = d.getVar('PN', True)
124 pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
125 moddir = os.path.join(pkgdir, localdir)
126 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
127 logger.info("Update " + ud.url)
128 bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
129 # update sources there
130 os.chdir(moddir)
131 cmd = cvsupdatecmd
132 else:
133 logger.info("Fetch " + ud.url)
134 # check out sources there
135 bb.utils.mkdirhier(pkgdir)
136 os.chdir(pkgdir)
137 logger.debug(1, "Running %s", cvscmd)
138 bb.fetch2.check_network_access(d, cvscmd, ud.url)
139 cmd = cvscmd
140
141 runfetchcmd(cmd, d, cleanup = [moddir])
142
143 if not os.access(moddir, os.R_OK):
144            raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)
145
146 scmdata = ud.parm.get("scmdata", "")
147 if scmdata == "keep":
148 tar_flags = ""
149 else:
150 tar_flags = "--exclude 'CVS'"
151
152 # tar them up to a defined filename
153 if 'fullpath' in ud.parm:
154 os.chdir(pkgdir)
155 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
156 else:
157 os.chdir(moddir)
158 os.chdir('..')
159 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
160
161 runfetchcmd(cmd, d, cleanup = [ud.localpath])
162
163 def clean(self, ud, d):
164 """ Clean CVS Files and tarballs """
165
166 pkg = d.getVar('PN', True)
167 pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
168
169 bb.utils.remove(pkgdir, True)
170 bb.utils.remove(ud.localpath)
171
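For illustration only (the host, path and module are invented): with the default pserver method, no password and no proxy, a SRC_URI of cvs://anonymous@cvs.example.org/cvsroot/project;module=project would make the download() method above assemble roughly

    cvsroot = ":pserver:anonymous@cvs.example.org:/cvsroot/project"
    cvscmd  = FETCHCMD_cvs + " '-d" + cvsroot + "' co project"

before tarring the checkout up into the localfile named in urldata_init().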
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..9ca24428a1
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,355 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports fetching multiple branches, with the branches
15 separated by commas. In the multiple-branch case, the name option must
16 contain the same number of names, one per branch; each name is used to
17 specify the SRCREV for its branch,
18 e.g.:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 so the current revision may disappear from the upstream repo. This option
33 reminds the fetcher to preserve the local cache carefully for future use.
34 The default value is "0"; set rebaseable=1 for a rebaseable git repo.
35
36- nocheckout
37 Don't check out source code when unpacking. Set this option for recipes
38 that have their own routine to check out code.
39 The default is "0"; set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't check out the source code
43 when unpacking. Set this option for recipes that have their own routine to
44 check out code and their own tracking branch requirements.
45 The default is "0"; set bareclone=1 if needed.
46
47- nobranch
48 Don't validate the SHA against a branch. Set this option for recipes that
49 refer to a commit which is reachable from a tag rather than from a branch.
50 The default is "0"; set nobranch=1 if needed.
51
52"""
53
54#Copyright (C) 2005 Richard Purdie
55#
56# This program is free software; you can redistribute it and/or modify
57# it under the terms of the GNU General Public License version 2 as
58# published by the Free Software Foundation.
59#
60# This program is distributed in the hope that it will be useful,
61# but WITHOUT ANY WARRANTY; without even the implied warranty of
62# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63# GNU General Public License for more details.
64#
65# You should have received a copy of the GNU General Public License along
66# with this program; if not, write to the Free Software Foundation, Inc.,
67# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
68
69import os
70import bb
71from bb import data
72from bb.fetch2 import FetchMethod
73from bb.fetch2 import runfetchcmd
74from bb.fetch2 import logger
75
76class Git(FetchMethod):
77 """Class to fetch a module or modules from git repositories"""
78 def init(self, d):
79 pass
80
81 def supports(self, ud, d):
82 """
83 Check to see if a given url can be fetched with git.
84 """
85 return ud.type in ['git']
86
87 def supports_checksum(self, urldata):
88 return False
89
90 def urldata_init(self, ud, d):
91 """
92 init git specific variable within url data
93 so that the git method like latest_revision() can work
94 """
95 if 'protocol' in ud.parm:
96 ud.proto = ud.parm['protocol']
97 elif not ud.host:
98 ud.proto = 'file'
99 else:
100 ud.proto = "git"
101
102 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
103 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
104
105 ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
106
107 ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
108
109 ud.nobranch = ud.parm.get("nobranch","0") == "1"
110
111 # bareclone implies nocheckout
112 ud.bareclone = ud.parm.get("bareclone","0") == "1"
113 if ud.bareclone:
114 ud.nocheckout = 1
115
116 ud.unresolvedrev = {}
117 branches = ud.parm.get("branch", "master").split(',')
118 if len(branches) != len(ud.names):
119 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
120 ud.branches = {}
121 for name in ud.names:
122 branch = branches[ud.names.index(name)]
123 ud.branches[name] = branch
124 ud.unresolvedrev[name] = branch
125
126 ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
127
128 ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
129
130 ud.setup_revisons(d)
131
132 for name in ud.names:
133            # Ensure anything that doesn't look like a sha1 checksum/revision is translated into one
134 if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
135 if ud.revisions[name]:
136 ud.unresolvedrev[name] = ud.revisions[name]
137 ud.revisions[name] = self.latest_revision(ud, d, name)
138
139 gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
140 # for rebaseable git repo, it is necessary to keep mirror tar ball
141 # per revision, so that even the revision disappears from the
142 # upstream repo in the future, the mirror will remain intact and still
143 # contains the revision
144 if ud.rebaseable:
145 for name in ud.names:
146 gitsrcname = gitsrcname + '_' + ud.revisions[name]
147 ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
148 ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
149 gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
150 ud.clonedir = os.path.join(gitdir, gitsrcname)
151
152 ud.localfile = ud.clonedir
153
154 def localpath(self, ud, d):
155 return ud.clonedir
156
157 def need_update(self, ud, d):
158 if not os.path.exists(ud.clonedir):
159 return True
160 os.chdir(ud.clonedir)
161 for name in ud.names:
162 if not self._contains_ref(ud, d, name):
163 return True
164 if ud.write_tarballs and not os.path.exists(ud.fullmirror):
165 return True
166 return False
167
168 def try_premirror(self, ud, d):
169 # If we don't do this, updating an existing checkout with only premirrors
170 # is not possible
171 if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
172 return True
173 if os.path.exists(ud.clonedir):
174 return False
175 return True
176
177 def download(self, ud, d):
178 """Fetch url"""
179
180 if ud.user:
181 username = ud.user + '@'
182 else:
183 username = ""
184
185 ud.repochanged = not os.path.exists(ud.fullmirror)
186
187 # If the checkout doesn't exist and the mirror tarball does, extract it
188 if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
189 bb.utils.mkdirhier(ud.clonedir)
190 os.chdir(ud.clonedir)
191 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
192
193 repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
194
195 # If the repo still doesn't exist, fallback to cloning it
196 if not os.path.exists(ud.clonedir):
197 # We do this since git will use a "-l" option automatically for local urls where possible
198 if repourl.startswith("file://"):
199 repourl = repourl[7:]
200 clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
201 if ud.proto.lower() != 'file':
202 bb.fetch2.check_network_access(d, clone_cmd)
203 runfetchcmd(clone_cmd, d)
204
205 os.chdir(ud.clonedir)
206 # Update the checkout if needed
207 needupdate = False
208 for name in ud.names:
209 if not self._contains_ref(ud, d, name):
210 needupdate = True
211 if needupdate:
212 try:
213 runfetchcmd("%s remote rm origin" % ud.basecmd, d)
214 except bb.fetch2.FetchError:
215 logger.debug(1, "No Origin")
216
217 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
218 fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
219 if ud.proto.lower() != 'file':
220 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
221 runfetchcmd(fetch_cmd, d)
222 runfetchcmd("%s prune-packed" % ud.basecmd, d)
223 runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
224 ud.repochanged = True
225 os.chdir(ud.clonedir)
226 for name in ud.names:
227 if not self._contains_ref(ud, d, name):
228 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
229
230 def build_mirror_data(self, ud, d):
231 # Generate a mirror tarball if needed
232 if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
233 # it's possible that this symlink points to read-only filesystem with PREMIRROR
234 if os.path.islink(ud.fullmirror):
235 os.unlink(ud.fullmirror)
236
237 os.chdir(ud.clonedir)
238 logger.info("Creating tarball of git repository")
239 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
240 runfetchcmd("touch %s.done" % (ud.fullmirror), d)
241
242 def unpack(self, ud, destdir, d):
243 """ unpack the downloaded src to destdir"""
244
245 subdir = ud.parm.get("subpath", "")
246 if subdir != "":
247 readpathspec = ":%s" % (subdir)
248 def_destsuffix = "%s/" % os.path.basename(subdir)
249 else:
250 readpathspec = ""
251 def_destsuffix = "git/"
252
253 destsuffix = ud.parm.get("destsuffix", def_destsuffix)
254 destdir = ud.destdir = os.path.join(destdir, destsuffix)
255 if os.path.exists(destdir):
256 bb.utils.prunedir(destdir)
257
258 cloneflags = "-s -n"
259 if ud.bareclone:
260 cloneflags += " --mirror"
261
262 # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
263 # and you end up with some horrible union of the two when you attempt to clone it
264 # The least invasive workaround seems to be a symlink to the real directory to
265 # fool git into ignoring any .git version that may also be present.
266 #
267 # The issue is fixed in more recent versions of git so we can drop this hack in future
268 # when that version becomes common enough.
269 clonedir = ud.clonedir
270 if not ud.path.endswith(".git"):
271 indirectiondir = destdir[:-1] + ".indirectionsymlink"
272 if os.path.exists(indirectiondir):
273 os.remove(indirectiondir)
274 bb.utils.mkdirhier(os.path.dirname(indirectiondir))
275 os.symlink(ud.clonedir, indirectiondir)
276 clonedir = indirectiondir
277
278 runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
279 if not ud.nocheckout:
280 os.chdir(destdir)
281 if subdir != "":
282 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
283 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
284 else:
285 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
286 return True
287
288 def clean(self, ud, d):
289 """ clean the git directory """
290
291 bb.utils.remove(ud.localpath, True)
292 bb.utils.remove(ud.fullmirror)
293 bb.utils.remove(ud.fullmirror + ".done")
294
295 def supports_srcrev(self):
296 return True
297
298 def _contains_ref(self, ud, d, name):
299 cmd = ""
300 if ud.nobranch:
301 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
302 ud.basecmd, ud.revisions[name])
303 else:
304 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
305 ud.basecmd, ud.revisions[name], ud.branches[name])
306 try:
307 output = runfetchcmd(cmd, d, quiet=True)
308 except bb.fetch2.FetchError:
309 return False
310 if len(output.split()) > 1:
311            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
312 return output.split()[0] != "0"
313
314 def _revision_key(self, ud, d, name):
315 """
316 Return a unique key for the url
317 """
318 return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
319
320 def _lsremote(self, ud, d, search):
321 """
322 Run git ls-remote with the specified search string
323 """
324 if ud.user:
325 username = ud.user + '@'
326 else:
327 username = ""
328
329 cmd = "%s ls-remote %s://%s%s%s %s" % \
330 (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
331 if ud.proto.lower() != 'file':
332 bb.fetch2.check_network_access(d, cmd)
333 output = runfetchcmd(cmd, d, True)
334 if not output:
335 raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
336 return output
337
338 def _latest_revision(self, ud, d, name):
339 """
340 Compute the HEAD revision for the url
341 """
342 search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
343 output = self._lsremote(ud, d, search)
344 return output.split()[0]
345
346 def _build_revision(self, ud, d, name):
347 return ud.revisions[name]
348
349 def checkstatus(self, ud, d):
350 fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
351 try:
352 runfetchcmd(fetchcmd, d, quiet=True)
353 return True
354        except bb.fetch2.FetchError:
355 return False
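The _latest_revision() method above resolves a symbolic branch or tag name to a concrete SHA by reading git ls-remote output. A rough standalone equivalent, for illustration only (plain subprocess instead of runfetchcmd; the function name and URL are invented):

    import subprocess

    def resolve_git_rev(repourl, symbolic):
        # Query both the branch head and the dereferenced annotated tag,
        # mirroring the refs/heads/... refs/tags/...^{} search used above
        search = "refs/heads/%s refs/tags/%s^{}" % (symbolic, symbolic)
        output = subprocess.check_output("git ls-remote %s %s" % (repourl, search), shell=True)
        if not output:
            raise RuntimeError("no matching refs for %s in %s" % (symbolic, repourl))
        # The first whitespace-separated field is the SHA
        return output.split()[0]

    # e.g. resolve_git_rev("git://git.example.com/project.git", "master")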
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 0000000000..0f37897450
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,76 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git annex implementation
5"""
6
7# Copyright (C) 2014 Otavio Salvador
8# Copyright (C) 2014 O.S. Systems Software LTDA.
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch2.git import Git
27from bb.fetch2 import runfetchcmd
28from bb.fetch2 import logger
29
30class GitANNEX(Git):
31 def supports(self, ud, d):
32 """
33 Check to see if a given url can be fetched with git.
34 """
35 return ud.type in ['gitannex']
36
37 def uses_annex(self, ud, d):
38 for name in ud.names:
39 try:
40 runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
41 return True
42            except bb.fetch2.FetchError:
43 pass
44
45 return False
46
47 def update_annex(self, ud, d):
48 try:
49 runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
50        except bb.fetch2.FetchError:
51 return False
52 runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
53
54 return True
55
56 def download(self, ud, d):
57 Git.download(self, ud, d)
58
59 os.chdir(ud.clonedir)
60 annex = self.uses_annex(ud, d)
61 if annex:
62 self.update_annex(ud, d)
63
64 def unpack(self, ud, destdir, d):
65 Git.unpack(self, ud, destdir, d)
66
67 os.chdir(ud.destdir)
68 try:
69 runfetchcmd("%s annex sync" % (ud.basecmd), d)
70        except bb.fetch2.FetchError:
71 pass
72
73 annex = self.uses_annex(ud, d)
74 if annex:
75 runfetchcmd("%s annex get" % (ud.basecmd), d)
76 runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000000..1a762153c4
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,126 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git submodules implementation
5"""
6
7# Copyright (C) 2013 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import os
23import bb
24from bb import data
25from bb.fetch2.git import Git
26from bb.fetch2 import runfetchcmd
27from bb.fetch2 import logger
28
29class GitSM(Git):
30 def supports(self, ud, d):
31 """
32 Check to see if a given url can be fetched with git.
33 """
34 return ud.type in ['gitsm']
35
36 def uses_submodules(self, ud, d):
37 for name in ud.names:
38 try:
39 runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
40 return True
41            except bb.fetch2.FetchError:
42 pass
43 return False
44
45 def _set_relative_paths(self, repopath):
46 """
47 Fix submodule paths to be relative instead of absolute,
48 so that when we move the repo it doesn't break
49 (In Git 1.7.10+ this is done automatically)
50 """
51 submodules = []
52 with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
53 for line in f.readlines():
54 if line.startswith('[submodule'):
55 submodules.append(line.split('"')[1])
56
57 for module in submodules:
58 repo_conf = os.path.join(repopath, module, '.git')
59 if os.path.exists(repo_conf):
60 with open(repo_conf, 'r') as f:
61 lines = f.readlines()
62 newpath = ''
63 for i, line in enumerate(lines):
64 if line.startswith('gitdir:'):
65 oldpath = line.split(': ')[-1].rstrip()
66 if oldpath.startswith('/'):
67 newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
68 lines[i] = 'gitdir: %s\n' % newpath
69 break
70 if newpath:
71 with open(repo_conf, 'w') as f:
72 for line in lines:
73 f.write(line)
74
75 repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
76 if os.path.exists(repo_conf2):
77 with open(repo_conf2, 'r') as f:
78 lines = f.readlines()
79 newpath = ''
80 for i, line in enumerate(lines):
81 if line.lstrip().startswith('worktree = '):
82 oldpath = line.split(' = ')[-1].rstrip()
83 if oldpath.startswith('/'):
84 newpath = '../' * (module.count('/') + 3) + module
85 lines[i] = '\tworktree = %s\n' % newpath
86 break
87 if newpath:
88 with open(repo_conf2, 'w') as f:
89 for line in lines:
90 f.write(line)
91
92 def update_submodules(self, ud, d):
93 # We have to convert bare -> full repo, do the submodule bit, then convert back
94 tmpclonedir = ud.clonedir + ".tmp"
95 gitdir = tmpclonedir + os.sep + ".git"
96 bb.utils.remove(tmpclonedir, True)
97 os.mkdir(tmpclonedir)
98 os.rename(ud.clonedir, gitdir)
99 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
100 os.chdir(tmpclonedir)
101 runfetchcmd(ud.basecmd + " reset --hard", d)
102 runfetchcmd(ud.basecmd + " submodule init", d)
103 runfetchcmd(ud.basecmd + " submodule update", d)
104 self._set_relative_paths(tmpclonedir)
105 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
106 os.rename(gitdir, ud.clonedir,)
107 bb.utils.remove(tmpclonedir, True)
108
109 def download(self, ud, d):
110 Git.download(self, ud, d)
111
112 os.chdir(ud.clonedir)
113 submodules = self.uses_submodules(ud, d)
114 if submodules:
115 self.update_submodules(ud, d)
116
117 def unpack(self, ud, destdir, d):
118 Git.unpack(self, ud, destdir, d)
119
120 os.chdir(ud.destdir)
121 submodules = self.uses_submodules(ud, d)
122 if submodules:
123 runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
124 runfetchcmd(ud.basecmd + " submodule init", d)
125 runfetchcmd(ud.basecmd + " submodule update", d)
126
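As a concrete illustration of _set_relative_paths() above (the submodule name "libs/foo" is invented for the example): an absolute gitdir line in libs/foo/.git is rewritten to

    gitdir: ../../.git/modules/libs/foo

and the matching worktree line in .git/modules/libs/foo/config becomes

    worktree = ../../../../libs/foo

so the clone keeps working after update_submodules() renames it back over ud.clonedir.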
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..6927f6111e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,187 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
38class Hg(FetchMethod):
39 """Class to fetch from mercurial repositories"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with mercurial.
43 """
44 return ud.type in ['hg']
45
46 def urldata_init(self, ud, d):
47 """
48 init hg specific variable within url data
49 """
50 if not "module" in ud.parm:
51 raise MissingParameterError('module', ud.url)
52
53 ud.module = ud.parm["module"]
54
55 # Create paths to mercurial checkouts
56 relpath = self._strip_leading_slashes(ud.path)
57 ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
58 ud.moddir = os.path.join(ud.pkgdir, ud.module)
59
60 ud.setup_revisons(d)
61
62 if 'rev' in ud.parm:
63 ud.revision = ud.parm['rev']
64 elif not ud.revision:
65 ud.revision = self.latest_revision(ud, d)
66
67 ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
68
69 def need_update(self, ud, d):
70 revTag = ud.parm.get('rev', 'tip')
71 if revTag == "tip":
72 return True
73 if not os.path.exists(ud.localpath):
74 return True
75 return False
76
77 def _buildhgcommand(self, ud, d, command):
78 """
79 Build up an hg commandline based on ud
80 command is "fetch", "update", "info"
81 """
82
83 basecmd = data.expand('${FETCHCMD_hg}', d)
84
85 proto = ud.parm.get('protocol', 'http')
86
87 host = ud.host
88 if proto == "file":
89 host = "/"
90 ud.host = "localhost"
91
92 if not ud.user:
93 hgroot = host + ud.path
94 else:
95 if ud.pswd:
96 hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
97 else:
98 hgroot = ud.user + "@" + host + ud.path
99
100 if command == "info":
101 return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
102
103 options = [];
104
105 # Don't specify revision for the fetch; clone the entire repo.
106 # This avoids an issue if the specified revision is a tag, because
107 # the tag actually exists in the specified revision + 1, so it won't
108 # be available when used in any successive commands.
109 if ud.revision and command != "fetch":
110 options.append("-r %s" % ud.revision)
111
112 if command == "fetch":
113 cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
114 elif command == "pull":
115 # do not pass options list; limiting pull to rev causes the local
116            # repo not to contain it, and the immediately following "update"
117            # command will crash
118 if ud.user and ud.pswd:
119 cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
120 else:
121 cmd = "%s pull" % (basecmd)
122 elif command == "update":
123 cmd = "%s update -C %s" % (basecmd, " ".join(options))
124 else:
125 raise FetchError("Invalid hg command %s" % command, ud.url)
126
127 return cmd
128
129 def download(self, ud, d):
130 """Fetch url"""
131
132 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
133
134 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
135 updatecmd = self._buildhgcommand(ud, d, "pull")
136 logger.info("Update " + ud.url)
137 # update sources there
138 os.chdir(ud.moddir)
139 logger.debug(1, "Running %s", updatecmd)
140 bb.fetch2.check_network_access(d, updatecmd, ud.url)
141 runfetchcmd(updatecmd, d)
142
143 else:
144 fetchcmd = self._buildhgcommand(ud, d, "fetch")
145 logger.info("Fetch " + ud.url)
146 # check out sources there
147 bb.utils.mkdirhier(ud.pkgdir)
148 os.chdir(ud.pkgdir)
149 logger.debug(1, "Running %s", fetchcmd)
150 bb.fetch2.check_network_access(d, fetchcmd, ud.url)
151 runfetchcmd(fetchcmd, d)
152
153 # Even when we clone (fetch), we still need to update as hg's clone
154        # won't check out the specified revision if it's on a branch
155 updatecmd = self._buildhgcommand(ud, d, "update")
156 os.chdir(ud.moddir)
157 logger.debug(1, "Running %s", updatecmd)
158 runfetchcmd(updatecmd, d)
159
160 scmdata = ud.parm.get("scmdata", "")
161 if scmdata == "keep":
162 tar_flags = ""
163 else:
164            tar_flags = "--exclude '.hg' --exclude '.hgtags'"
165
166 os.chdir(ud.pkgdir)
167 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
168
169 def supports_srcrev(self):
170 return True
171
172 def _latest_revision(self, ud, d, name):
173 """
174 Compute tip revision for the url
175 """
176 bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
177 output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
178 return output.strip()
179
180 def _build_revision(self, ud, d, name):
181 return ud.revision
182
183 def _revision_key(self, ud, d, name):
184 """
185 Return a unique key for the url
186 """
187 return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..5c4e42a942
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,116 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import urllib
30import bb
31import bb.utils
32from bb import data
33from bb.fetch2 import FetchMethod, FetchError
34from bb.fetch2 import logger
35
36class Local(FetchMethod):
37 def supports(self, urldata, d):
38 """
39 Check to see if a given url represents a local fetch.
40 """
41 return urldata.type in ['file']
42
43 def urldata_init(self, ud, d):
44 # We don't set localfile as for this fetcher the file is already local!
45 ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
46 ud.basename = os.path.basename(ud.decodedurl)
47 ud.basepath = ud.decodedurl
48 return
49
50 def localpath(self, urldata, d):
51 """
52 Return the local filename of a given url assuming a successful fetch.
53 """
54 path = urldata.decodedurl
55 newpath = path
56 if path[0] != "/":
57 filespath = data.getVar('FILESPATH', d, True)
58 if filespath:
59 logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n ".join(filespath.split(":"))))
60 newpath = bb.utils.which(filespath, path)
61 if not newpath:
62 filesdir = data.getVar('FILESDIR', d, True)
63 if filesdir:
64 logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
65 newpath = os.path.join(filesdir, path)
66 if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
67 # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
68 newpath = bb.utils.which(filespath, ".")
69 logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
70 return newpath
71 if not os.path.exists(newpath):
72 dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
73 logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
74 bb.utils.mkdirhier(os.path.dirname(dldirfile))
75 return dldirfile
76 return newpath
77
78 def need_update(self, ud, d):
79 if ud.url.find("*") != -1:
80 return False
81 if os.path.exists(ud.localpath):
82 return False
83 return True
84
85 def download(self, urldata, d):
86 """Fetch urls (no-op for Local method)"""
87 # no need to fetch local files, we'll deal with them in place.
88 if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
89 locations = []
90 filespath = data.getVar('FILESPATH', d, True)
91 if filespath:
92 locations = filespath.split(":")
93 filesdir = data.getVar('FILESDIR', d, True)
94 if filesdir:
95 locations.append(filesdir)
96 locations.append(d.getVar("DL_DIR", True))
97
98 msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
99 raise FetchError(msg)
100
101 return True
102
103 def checkstatus(self, urldata, d):
104 """
105 Check the status of the url
106 """
107 if urldata.localpath.find("*") != -1:
108 logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
109 return True
110 if os.path.exists(urldata.localpath):
111 return True
112 return False
113
114 def clean(self, urldata, d):
115 return
116
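The localpath() resolution above searches FILESPATH first, then FILESDIR, and finally falls back to DL_DIR. A small sketch of the first step, using bb.utils.which directly (the layer paths and file name are invented):

    import bb.utils

    # Colon-separated search path, as FILESPATH would provide it
    filespath = "/layers/meta-example/recipes-foo/files:/layers/meta-example/files"

    # Returns the first existing ".../defconfig" under those directories,
    # or an empty string if none of them contains it
    found = bb.utils.which(filespath, "defconfig")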
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..3d8779682f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import logging
12import bb
13from bb import data
14from bb.fetch2 import FetchMethod
15from bb.fetch2 import FetchError
16from bb.fetch2 import MissingParameterError
17from bb.fetch2 import runfetchcmd, logger
18
19class Osc(FetchMethod):
20 """Class to fetch a module or modules from Opensuse build server
21 repositories."""
22
23 def supports(self, ud, d):
24 """
25 Check to see if a given url can be fetched with osc.
26 """
27 return ud.type in ['osc']
28
29 def urldata_init(self, ud, d):
30 if not "module" in ud.parm:
31 raise MissingParameterError('module', ud.url)
32
33 ud.module = ud.parm["module"]
34
35 # Create paths to osc checkouts
36 relpath = self._strip_leading_slashes(ud.path)
37 ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
38 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
39
40 if 'rev' in ud.parm:
41 ud.revision = ud.parm['rev']
42 else:
43 pv = data.getVar("PV", d, 0)
44 rev = bb.fetch2.srcrev_internal_helper(ud, d)
45 if rev and rev != True:
46 ud.revision = rev
47 else:
48 ud.revision = ""
49
50 ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
51
52 def _buildosccommand(self, ud, d, command):
53 """
54        Build up an osc commandline based on ud
55 command is "fetch", "update", "info"
56 """
57
58 basecmd = data.expand('${FETCHCMD_osc}', d)
59
60 proto = ud.parm.get('protocol', 'ocs')
61
62 options = []
63
64 config = "-c %s" % self.generate_config(ud, d)
65
66 if ud.revision:
67 options.append("-r %s" % ud.revision)
68
69 coroot = self._strip_leading_slashes(ud.path)
70
71 if command == "fetch":
72 osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
73 elif command == "update":
74 osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
75 else:
76 raise FetchError("Invalid osc command %s" % command, ud.url)
77
78 return osccmd
79
80 def download(self, ud, d):
81 """
82 Fetch url
83 """
84
85 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
86
87 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
88 oscupdatecmd = self._buildosccommand(ud, d, "update")
89 logger.info("Update "+ ud.url)
90 # update sources there
91 os.chdir(ud.moddir)
92 logger.debug(1, "Running %s", oscupdatecmd)
93 bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
94 runfetchcmd(oscupdatecmd, d)
95 else:
96 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
97 logger.info("Fetch " + ud.url)
98 # check out sources there
99 bb.utils.mkdirhier(ud.pkgdir)
100 os.chdir(ud.pkgdir)
101 logger.debug(1, "Running %s", oscfetchcmd)
102 bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
103 runfetchcmd(oscfetchcmd, d)
104
105 os.chdir(os.path.join(ud.pkgdir + ud.path))
106 # tar them up to a defined filename
107 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
108
109 def supports_srcrev(self):
110 return False
111
112 def generate_config(self, ud, d):
113 """
114 Generate a .oscrc to be used for this run.
115 """
116
117 config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
118 if (os.path.exists(config_path)):
119 os.remove(config_path)
120
121 f = open(config_path, 'w')
122 f.write("[general]\n")
123 f.write("apisrv = %s\n" % ud.host)
124 f.write("scheme = http\n")
125 f.write("su-wrapper = su -c\n")
126 f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
127 f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
128 f.write("extra-pkgs = gzip\n")
129 f.write("\n")
130 f.write("[%s]\n" % ud.host)
131 f.write("user = %s\n" % ud.parm["user"])
132 f.write("pass = %s\n" % ud.parm["pswd"])
133 f.close()
134
135 return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..9329d72779
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,194 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import subprocess
31import logging
32import bb
33from bb import data
34from bb.fetch2 import FetchMethod
35from bb.fetch2 import FetchError
36from bb.fetch2 import logger
37from bb.fetch2 import runfetchcmd
38
39class Perforce(FetchMethod):
40 def supports(self, ud, d):
41 return ud.type in ['p4']
42
43 def doparse(url, d):
44 parm = {}
45 path = url.split("://")[1]
46 delim = path.find("@");
47 if delim != -1:
48 (user, pswd, host, port) = path.split('@')[0].split(":")
49 path = path.split('@')[1]
50 else:
51 (host, port) = data.getVar('P4PORT', d).split(':')
52 user = ""
53 pswd = ""
54
55 if path.find(";") != -1:
56 keys=[]
57 values=[]
58 plist = path.split(';')
59 for item in plist:
60 if item.count('='):
61 (key, value) = item.split('=')
62 keys.append(key)
63 values.append(value)
64
65 parm = dict(zip(keys, values))
66 path = "//" + path.split(';')[0]
67 host += ":%s" % (port)
68 parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
69
70 return host, path, user, pswd, parm
71 doparse = staticmethod(doparse)
72
73 def getcset(d, depot, host, user, pswd, parm):
74 p4opt = ""
75 if "cset" in parm:
76 return parm["cset"];
77 if user:
78 p4opt += " -u %s" % (user)
79 if pswd:
80 p4opt += " -P %s" % (pswd)
81 if host:
82 p4opt += " -p %s" % (host)
83
84 p4date = data.getVar("P4DATE", d, True)
85 if "revision" in parm:
86 depot += "#%s" % (parm["revision"])
87 elif "label" in parm:
88 depot += "@%s" % (parm["label"])
89 elif p4date:
90 depot += "@%s" % (p4date)
91
92 p4cmd = data.getVar('FETCHCMD_p4', d, True)
93 logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
94 p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
95 cset = p4file.strip()
96 logger.debug(1, "READ %s", cset)
97 if not cset:
98 return -1
99
100 return cset.split(' ')[1]
101 getcset = staticmethod(getcset)
102
103 def urldata_init(self, ud, d):
104 (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
105
106 # If a label is specified, we use that as our filename
107
108 if "label" in parm:
109 ud.localfile = "%s.tar.gz" % (parm["label"])
110 return
111
112 base = path
113 which = path.find('/...')
114 if which != -1:
115 base = path[:which-1]
116
117 base = self._strip_leading_slashes(base)
118
119 cset = Perforce.getcset(d, path, host, user, pswd, parm)
120
121 ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
122
123 def download(self, ud, d):
124 """
125 Fetch urls
126 """
127
128 (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
129
130 if depot.find('/...') != -1:
131 path = depot[:depot.find('/...')]
132 else:
133 path = depot
134
135 module = parm.get('module', os.path.basename(path))
136
137 # Get the p4 command
138 p4opt = ""
139 if user:
140 p4opt += " -u %s" % (user)
141
142 if pswd:
143 p4opt += " -P %s" % (pswd)
144
145 if host:
146 p4opt += " -p %s" % (host)
147
148 p4cmd = data.getVar('FETCHCMD_p4', d, True)
149
150 # create temp directory
151 logger.debug(2, "Fetch: creating temporary directory")
152 bb.utils.mkdirhier(d.expand('${WORKDIR}'))
153 mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
154 tmpfile, errors = bb.process.run(mktemp)
155 tmpfile = tmpfile.strip()
156 if not tmpfile:
157 raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
158
159 if "label" in parm:
160 depot = "%s@%s" % (depot, parm["label"])
161 else:
162 cset = Perforce.getcset(d, depot, host, user, pswd, parm)
163 depot = "%s@%s" % (depot, cset)
164
165 os.chdir(tmpfile)
166 logger.info("Fetch " + ud.url)
167 logger.info("%s%s files %s", p4cmd, p4opt, depot)
168 p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
169 p4file = [f.rstrip() for f in p4file.splitlines()]
170
171 if not p4file:
172 raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
173
174 count = 0
175
176 for file in p4file:
177 list = file.split()
178
179 if list[2] == "delete":
180 continue
181
182 dest = list[0][len(path)+1:]
183 where = dest.find("#")
184
185 subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
186 count = count + 1
187
188 if count == 0:
189            logger.error("Fetch: No files gathered from the P4 fetch")
190 raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
191
192 runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
193 # cleanup
194 bb.utils.prunedir(tmpfile)
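Note the URL layout that Perforce.doparse() above expects (the example URL is invented): everything before the '@' is user:password:host:port, and everything after it is the depot path plus optional ;key=value parameters, for instance

    p4://jdoe:secret:perforce.example.com:1666@/depot/project/...;module=project

When there is no '@' at all, the host and port are taken from P4PORT and the URL carries only the depot path and parameters.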
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..21678eb7d9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch2 import FetchMethod
30from bb.fetch2 import runfetchcmd
31
32class Repo(FetchMethod):
33 """Class to fetch a module or modules from repo (git) repositories"""
34 def supports(self, ud, d):
35 """
36 Check to see if a given url can be fetched with repo.
37 """
38 return ud.type in ["repo"]
39
40 def urldata_init(self, ud, d):
41 """
42 We don"t care about the git rev of the manifests repository, but
43 we do care about the manifest to use. The default is "default".
44 We also care about the branch or tag to be used. The default is
45 "master".
46 """
47
48 ud.proto = ud.parm.get('protocol', 'git')
49 ud.branch = ud.parm.get('branch', 'master')
50 ud.manifest = ud.parm.get('manifest', 'default.xml')
51 if not ud.manifest.endswith('.xml'):
52 ud.manifest += '.xml'
53
54 ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
55
56 def download(self, ud, d):
57 """Fetch url"""
58
59 if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
60 logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
61 return
62
63 gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
64 repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
65 codir = os.path.join(repodir, gitsrcname, ud.manifest)
66
67 if ud.user:
68 username = ud.user + "@"
69 else:
70 username = ""
71
72 bb.utils.mkdirhier(os.path.join(codir, "repo"))
73 os.chdir(os.path.join(codir, "repo"))
74 if not os.path.exists(os.path.join(codir, "repo", ".repo")):
75 bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
76 runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
77
78 bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
79 runfetchcmd("repo sync", d)
80 os.chdir(codir)
81
82 scmdata = ud.parm.get("scmdata", "")
83 if scmdata == "keep":
84 tar_flags = ""
85 else:
86 tar_flags = "--exclude '.repo' --exclude '.git'"
87
88 # Create a cache
89 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
90
91 def supports_srcrev(self):
92 return False
93
94 def _build_revision(self, ud, d):
95 return ud.manifest
96
97 def _want_sortable_revision(self, ud, d):
98 return False
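
Illustration only (not part of the patch): a minimal sketch of how urldata_init() above composes the cached tarball name; the host, path, manifest and branch values below are invented.

    host, path = "android.example.com", "/platform/manifest"
    manifest, branch = "default.xml", "master"
    # mirrors ud.localfile = "repo_%s%s_%s_%s.tar.gz" % (...)
    localfile = "repo_%s%s_%s_%s.tar.gz" % (host, path.replace("/", "."), manifest, branch)
    # -> repo_android.example.com.platform.manifest_default.xml_master.tar.gz
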
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..8ea4ef2ff3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake SFTP Fetch implementation
5
6Class for fetching files via SFTP. It tries to adhere to the (now
7expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
8Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
9(SSH)" (SECSH URI).
10
11It uses SFTP (so as to adhere to the SECSH URI specification). It only
12supports key based authentication, not password. This class, unlike
13the SSH fetcher, does not support fetching a directory tree from the
14remote.
15
16 http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
17 https://www.iana.org/assignments/uri-schemes/prov/sftp
18 https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
19
20Please note that '/' is used as the host path separator, and not ":"
21as you may be used to from the scp/sftp commands. You can use a
22~ (tilde) to specify a path relative to your home directory.
23(The /~user/ syntax, for specifying a path relative to another
24user's home directory, is not supported.) Note that the tilde must
25still follow the host path separator ("/"). See examples below.
26
27Example SRC_URIs:
28
29SRC_URI = "sftp://host.example.com/dir/path.file.txt"
30
31A path relative to your home directory.
32
33SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
34
35You can also specify a username (specifying a password in the
36URI is not supported, use SSH keys to authenticate):
37
38SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
39
40"""
41
42# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
43#
44# Based in part on bb.fetch2.wget:
45# Copyright (C) 2003, 2004 Chris Larson
46#
47# This program is free software; you can redistribute it and/or modify
48# it under the terms of the GNU General Public License version 2 as
49# published by the Free Software Foundation.
50#
51# This program is distributed in the hope that it will be useful,
52# but WITHOUT ANY WARRANTY; without even the implied warranty of
53# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
54# GNU General Public License for more details.
55#
56# You should have received a copy of the GNU General Public License along
57# with this program; if not, write to the Free Software Foundation, Inc.,
58# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
59#
60# Based on functions from the base bb module, Copyright 2003 Holger Schurig
61
62import os
63import bb
64import urllib
65import commands
66from bb import data
67from bb.fetch2 import URI
68from bb.fetch2 import FetchMethod
69from bb.fetch2 import runfetchcmd
70
71
72class SFTP(FetchMethod):
73 """Class to fetch urls via 'sftp'"""
74
75 def supports(self, ud, d):
76 """
77 Check to see if a given url can be fetched with sftp.
78 """
79 return ud.type in ['sftp']
80
81 def recommends_checksum(self, urldata):
82 return True
83
84 def urldata_init(self, ud, d):
85 if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
86 raise bb.fetch2.ParameterError(
87 "Invalid protocol - if you wish to fetch from a " +
88 "git repository using ssh, you need to use the " +
89 "git:// prefix with protocol=ssh", ud.url)
90
91 if 'downloadfilename' in ud.parm:
92 ud.basename = ud.parm['downloadfilename']
93 else:
94 ud.basename = os.path.basename(ud.path)
95
96 ud.localfile = data.expand(urllib.unquote(ud.basename), d)
97
98 def download(self, ud, d):
99 """Fetch urls"""
100
101 urlo = URI(ud.url)
102 basecmd = 'sftp -oPasswordAuthentication=no'
103 port = ''
104 if urlo.port:
105 port = '-P %d' % urlo.port
106 urlo.port = None
107
108 dldir = data.getVar('DL_DIR', d, True)
109 lpath = os.path.join(dldir, ud.localfile)
110
111 user = ''
112 if urlo.userinfo:
113 user = urlo.userinfo + '@'
114
115 path = urlo.path
116
117        # Support URIs relative to the user's home directory, with
118 # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
119 if path[:3] == '/~/':
120 path = path[3:]
121
122 remote = '%s%s:%s' % (user, urlo.hostname, path)
123
124 cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
125 commands.mkarg(lpath))
126
127 bb.fetch2.check_network_access(d, cmd, ud.url)
128 runfetchcmd(cmd, d)
129 return True
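
For illustration only (not part of the patch): a rough sketch of the command string download() above assembles for a hypothetical URL, assuming key-based authentication and Python 2's commands.mkarg for quoting.

    import commands
    basecmd, port = 'sftp -oPasswordAuthentication=no', ''
    user, host, path = 'builder@', 'host.example.com', '/~/dir/path.file.txt'
    lpath = '/downloads/path.file.txt'      # hypothetical DL_DIR + localfile
    if path[:3] == '/~/':                   # same tilde handling as above
        path = path[3:]
    remote = '%s%s:%s' % (user, host, path)
    cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), commands.mkarg(lpath))
    # roughly: sftp -oPasswordAuthentication=no  'builder@host.example.com:dir/path.file.txt' '/downloads/path.file.txt'
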
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..4ae979472c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13    Please note that '/' is used as the host/path separator, not ':' as you may
14    be used to; also '~' can be used to specify the user's HOME, but again after '/'
15
16 Example SRC_URI:
17 SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
18 SRC_URI = "ssh://user@host.example.com/~/file.txt"
19'''
20
21# Copyright (C) 2006 OpenedHand Ltd.
22#
23#
24# Based in part on svk.py:
25# Copyright (C) 2006 Holger Hans Peter Freyther
26# Based on svn.py:
27# Copyright (C) 2003, 2004 Chris Larson
28# Based on functions from the base bb module:
29# Copyright 2003 Holger Schurig
30#
31#
32# This program is free software; you can redistribute it and/or modify
33# it under the terms of the GNU General Public License version 2 as
34# published by the Free Software Foundation.
35#
36# This program is distributed in the hope that it will be useful,
37# but WITHOUT ANY WARRANTY; without even the implied warranty of
38# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
39# GNU General Public License for more details.
40#
41# You should have received a copy of the GNU General Public License along
42# with this program; if not, write to the Free Software Foundation, Inc.,
43# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44
45import re, os
46from bb import data
47from bb.fetch2 import FetchMethod
48from bb.fetch2 import FetchError
49from bb.fetch2 import logger
50from bb.fetch2 import runfetchcmd
51
52
53__pattern__ = re.compile(r'''
54 \s* # Skip leading whitespace
55 ssh:// # scheme
56 ( # Optional username/password block
57 (?P<user>\S+) # username
58 (:(?P<pass>\S+))? # colon followed by the password (optional)
59 )?
60 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
61 @
62 (?P<host>\S+?) # non-greedy match of the host
63 (:(?P<port>[0-9]+))? # colon followed by the port (optional)
64 /
65 (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
66 # and may include the use of '~' to reference the remote home
67 # directory
68 (?P<sparam>(;[^;]+)*)? # parameters block (optional)
69 $
70''', re.VERBOSE)
71
72class SSH(FetchMethod):
73 '''Class to fetch a module or modules via Secure Shell'''
74
75 def supports(self, urldata, d):
76 return __pattern__.match(urldata.url) != None
77
78 def supports_checksum(self, urldata):
79 return False
80
81 def urldata_init(self, urldata, d):
82 if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
83 raise bb.fetch2.ParameterError(
84 "Invalid protocol - if you wish to fetch from a git " +
85 "repository using ssh, you need to use " +
86 "git:// prefix with protocol=ssh", urldata.url)
87 m = __pattern__.match(urldata.url)
88 path = m.group('path')
89 host = m.group('host')
90 urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
91
92 def download(self, urldata, d):
93 dldir = d.getVar('DL_DIR', True)
94
95 m = __pattern__.match(urldata.url)
96 path = m.group('path')
97 host = m.group('host')
98 port = m.group('port')
99 user = m.group('user')
100 password = m.group('pass')
101
102 if port:
103 portarg = '-P %s' % port
104 else:
105 portarg = ''
106
107 if user:
108 fr = user
109 if password:
110 fr += ':%s' % password
111 fr += '@%s' % host
112 else:
113 fr = host
114 fr += ':%s' % path
115
116
117 import commands
118 cmd = 'scp -B -r %s %s %s/' % (
119 portarg,
120 commands.mkarg(fr),
121 commands.mkarg(dldir)
122 )
123
124 bb.fetch2.check_network_access(d, cmd, urldata.url)
125
126 runfetchcmd(cmd, d)
127
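
A sketch (not part of the patch) of what the URL pattern above extracts from one of the example SRC_URIs, assuming bitbake's lib directory is on sys.path so the module can be imported:

    from bb.fetch2 import ssh
    m = ssh.__pattern__.match("ssh://user@host.example.com:2222/~/file.txt")
    print("%s %s %s %s" % (m.group('user'), m.group('host'), m.group('port'), m.group('path')))
    # -> user host.example.com 2222 ~/file.txt
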
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..8847461913
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,191 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30import re
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
38class Svn(FetchMethod):
39 """Class to fetch a module or modules from svn repositories"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with svn.
43 """
44 return ud.type in ['svn']
45
46 def urldata_init(self, ud, d):
47 """
48        init svn specific variables within url data
49 """
50 if not "module" in ud.parm:
51 raise MissingParameterError('module', ud.url)
52
53 ud.basecmd = d.getVar('FETCHCMD_svn', True)
54
55 ud.module = ud.parm["module"]
56
57 # Create paths to svn checkouts
58 relpath = self._strip_leading_slashes(ud.path)
59 ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
60 ud.moddir = os.path.join(ud.pkgdir, ud.module)
61
62 ud.setup_revisons(d)
63
64 if 'rev' in ud.parm:
65 ud.revision = ud.parm['rev']
66
67 ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
68
69 def _buildsvncommand(self, ud, d, command):
70 """
71 Build up an svn commandline based on ud
72 command is "fetch", "update", "info"
73 """
74
75 proto = ud.parm.get('protocol', 'svn')
76
77 svn_rsh = None
78 if proto == "svn+ssh" and "rsh" in ud.parm:
79 svn_rsh = ud.parm["rsh"]
80
81 svnroot = ud.host + ud.path
82
83 options = []
84
85 options.append("--no-auth-cache")
86
87 if ud.user:
88 options.append("--username %s" % ud.user)
89
90 if ud.pswd:
91 options.append("--password %s" % ud.pswd)
92
93 if command == "info":
94 svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
95 elif command == "log1":
96 svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
97 else:
98 suffix = ""
99 if ud.revision:
100 options.append("-r %s" % ud.revision)
101 suffix = "@%s" % (ud.revision)
102
103 if command == "fetch":
104 svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
105 elif command == "update":
106 svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
107 else:
108 raise FetchError("Invalid svn command %s" % command, ud.url)
109
110 if svn_rsh:
111 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
112
113 return svncmd
114
115 def download(self, ud, d):
116 """Fetch url"""
117
118 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
119
120 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
121 svnupdatecmd = self._buildsvncommand(ud, d, "update")
122 logger.info("Update " + ud.url)
123 # update sources there
124 os.chdir(ud.moddir)
125            # We need to attempt to run svn upgrade first in case it's an older working copy format
126 try:
127 runfetchcmd(ud.basecmd + " upgrade", d)
128 except FetchError:
129 pass
130 logger.debug(1, "Running %s", svnupdatecmd)
131 bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
132 runfetchcmd(svnupdatecmd, d)
133 else:
134 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
135 logger.info("Fetch " + ud.url)
136 # check out sources there
137 bb.utils.mkdirhier(ud.pkgdir)
138 os.chdir(ud.pkgdir)
139 logger.debug(1, "Running %s", svnfetchcmd)
140 bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
141 runfetchcmd(svnfetchcmd, d)
142
143 scmdata = ud.parm.get("scmdata", "")
144 if scmdata == "keep":
145 tar_flags = ""
146 else:
147 tar_flags = "--exclude '.svn'"
148
149 os.chdir(ud.pkgdir)
150 # tar them up to a defined filename
151 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
152
153 def clean(self, ud, d):
154 """ Clean SVN specific files and dirs """
155
156 bb.utils.remove(ud.localpath)
157 bb.utils.remove(ud.moddir, True)
158
159
160 def supports_srcrev(self):
161 return True
162
163 def _revision_key(self, ud, d, name):
164 """
165 Return a unique key for the url
166 """
167 return "svn:" + ud.moddir
168
169 def _latest_revision(self, ud, d, name):
170 """
171 Return the latest upstream revision number
172 """
173 bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
174
175 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
176
177 # skip the first line, as per output of svn log
178 # then we expect the revision on the 2nd line
179 revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
180
181 return revision
182
183 def sortable_revision(self, ud, d, name):
184 """
185 Return a sortable revision number which in our case is the revision number
186 """
187
188 return False, self._build_revision(ud, d)
189
190 def _build_revision(self, ud, d):
191 return ud.revision
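
Illustration only (not part of the patch): the shape of the checkout command _buildsvncommand() above produces for the "fetch" case; the base command, host, module and revision values are invented.

    basecmd, proto = "/usr/bin/env svn", "svn"
    svnroot, module, rev = "svn.example.com/repos", "trunk/foo", "1234"
    options = ["--no-auth-cache", "-r %s" % rev]
    svncmd = "%s co %s %s://%s/%s@%s %s" % (basecmd, " ".join(options), proto, svnroot, module, rev, module)
    # -> /usr/bin/env svn co --no-auth-cache -r 1234 svn://svn.example.com/repos/trunk/foo@1234 trunk/foo
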
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..0456490368
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,106 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch2 import FetchMethod
34from bb.fetch2 import FetchError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
38class Wget(FetchMethod):
39 """Class to fetch urls via 'wget'"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with wget.
43 """
44 return ud.type in ['http', 'https', 'ftp']
45
46 def recommends_checksum(self, urldata):
47 return True
48
49 def urldata_init(self, ud, d):
50 if 'protocol' in ud.parm:
51 if ud.parm['protocol'] == 'git':
52 raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
53
54 if 'downloadfilename' in ud.parm:
55 ud.basename = ud.parm['downloadfilename']
56 else:
57 ud.basename = os.path.basename(ud.path)
58
59 ud.localfile = data.expand(urllib.unquote(ud.basename), d)
60
61 self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
62
63 def _runwget(self, ud, d, command, quiet):
64
65 logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
66 bb.fetch2.check_network_access(d, command)
67 runfetchcmd(command, d, quiet)
68
69 def download(self, ud, d):
70 """Fetch urls"""
71
72 fetchcmd = self.basecmd
73
74 if 'downloadfilename' in ud.parm:
75 dldir = d.getVar("DL_DIR", True)
76 bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
77 fetchcmd += " -O " + dldir + os.sep + ud.localfile
78
79 uri = ud.url.split(";")[0]
80 if os.path.exists(ud.localpath):
81            # file exists, but we didn't complete it... trying again...
82 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
83 else:
84 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
85
86 self._runwget(ud, d, fetchcmd, False)
87
88        # Sanity check since wget can pretend it succeeded when it didn't
89 # Also, this used to happen if sourceforge sent us to the mirror page
90 if not os.path.exists(ud.localpath):
91 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
92
93 if os.path.getsize(ud.localpath) == 0:
94 os.remove(ud.localpath)
95 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
96
97 return True
98
99 def checkstatus(self, ud, d):
100
101 uri = ud.url.split(";")[0]
102 fetchcmd = self.basecmd + " --spider '%s'" % uri
103
104 self._runwget(ud, d, fetchcmd, True)
105
106 return True
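
For illustration only (not part of the patch): roughly the command line download() above runs for a plain http URL on a first attempt (no partial file present), with a hypothetical DL_DIR.

    basecmd = "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
    uri = "http://downloads.example.com/foo-1.0.tar.gz"
    dl_dir = "/downloads"                    # hypothetical ${DL_DIR}
    fetchcmd = basecmd + " -P %s '%s'" % (dl_dir, uri)
    # -> /usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate -P /downloads 'http://downloads.example.com/foo-1.0.tar.gz'
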
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
new file mode 100644
index 0000000000..bf2e9f5542
--- /dev/null
+++ b/bitbake/lib/bb/methodpool.py
@@ -0,0 +1,29 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4#
5# Copyright (C) 2006 Holger Hans Peter Freyther
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20from bb.utils import better_compile, better_exec
21
22def insert_method(modulename, code, fn):
23 """
24    Add the code of a module. The methods will simply be
25    added; no checking will be done.
26 """
27 comp = better_compile(code, modulename, fn )
28 better_exec(comp, None, code, fn)
29
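
A minimal, hypothetical use of insert_method() above (not part of the patch), compiling a small function body so it becomes callable by the metadata machinery; the module and file names are invented.

    from bb import methodpool
    code = "def do_hello(d):\n    print('hello from metadata')\n"
    methodpool.insert_method("hello_module", code, "/hypothetical/recipe.bb")
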
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
new file mode 100644
index 0000000000..fca43eefd0
--- /dev/null
+++ b/bitbake/lib/bb/monitordisk.py
@@ -0,0 +1,265 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2012 Robert Yang
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import os, logging, re, sys
21import bb
22logger = logging.getLogger("BitBake.Monitor")
23
24def printErr(info):
25 logger.error("%s\n Disk space monitor will NOT be enabled" % info)
26
27def convertGMK(unit):
28
29 """ Convert the space unit G, M, K, the unit is case-insensitive """
30
31 unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
32 if unitG:
33 return int(unitG.group(1)) * (1024 ** 3)
34 unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
35 if unitM:
36 return int(unitM.group(1)) * (1024 ** 2)
37 unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
38 if unitK:
39 return int(unitK.group(1)) * 1024
40 unitN = re.match('([1-9][0-9]*)\s?$', unit)
41 if unitN:
42 return int(unitN.group(1))
43 else:
44 return None
45
46def getMountedDev(path):
47
48 """ Get the device mounted at the path, uses /proc/mounts """
49
50 # Get the mount point of the filesystem containing path
51 # st_dev is the ID of device containing file
52 parentDev = os.stat(path).st_dev
53 currentDev = parentDev
54 # When the current directory's device is different from the
55    # parent's, then the current directory is a mount point
56 while parentDev == currentDev:
57 mountPoint = path
58        # Use dirname to get the parent's directory
59 path = os.path.dirname(path)
60 # Reach the "/"
61 if path == mountPoint:
62 break
63 parentDev= os.stat(path).st_dev
64
65 try:
66 with open("/proc/mounts", "r") as ifp:
67 for line in ifp:
68 procLines = line.rstrip('\n').split()
69 if procLines[1] == mountPoint:
70 return procLines[0]
71 except EnvironmentError:
72 pass
73 return None
74
75def getDiskData(BBDirs, configuration):
76
77 """Prepare disk data for disk space monitor"""
78
79 # Save the device IDs, need the ID to be unique (the dictionary's key is
80    # unique), so that when more than one directory is located on the same
81    # device, we just monitor it once
82 devDict = {}
83 for pathSpaceInode in BBDirs.split():
84        # The input format is: "action,dir,space,inode", dir is a must, space
85 # and inode are optional
86 pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
87 if not pathSpaceInodeRe:
88 printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
89 return None
90
91 action = pathSpaceInodeRe.group(1)
92 if action not in ("ABORT", "STOPTASKS", "WARN"):
93 printErr("Unknown disk space monitor action: %s" % action)
94 return None
95
96 path = os.path.realpath(pathSpaceInodeRe.group(2))
97 if not path:
98 printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
99 return None
100
101 # The disk space or inode is optional, but it should have a correct
102 # value once it is specified
103 minSpace = pathSpaceInodeRe.group(3)
104 if minSpace:
105 minSpace = convertGMK(minSpace)
106 if not minSpace:
107 printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
108 return None
109 else:
110 # None means that it is not specified
111 minSpace = None
112
113 minInode = pathSpaceInodeRe.group(4)
114 if minInode:
115 minInode = convertGMK(minInode)
116 if not minInode:
117 printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
118 return None
119 else:
120 # None means that it is not specified
121 minInode = None
122
123 if minSpace is None and minInode is None:
124            printErr("No disk space or inode value found in BB_DISKMON_DIRS: %s" % pathSpaceInode)
125 return None
126 # mkdir for the directory since it may not exist, for example the
127 # DL_DIR may not exist at the very beginning
128 if not os.path.exists(path):
129 bb.utils.mkdirhier(path)
130 dev = getMountedDev(path)
131 # Use path/action as the key
132 devDict[os.path.join(path, action)] = [dev, minSpace, minInode]
133
134 return devDict
135
136def getInterval(configuration):
137
138 """ Get the disk space interval """
139
140 # The default value is 50M and 5K.
141 spaceDefault = 50 * 1024 * 1024
142 inodeDefault = 5 * 1024
143
144 interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
145 if not interval:
146 return spaceDefault, inodeDefault
147 else:
148 # The disk space or inode interval is optional, but it should
149 # have a correct value once it is specified
150 intervalRe = re.match('([^,]*),?\s*(.*)', interval)
151 if intervalRe:
152 intervalSpace = intervalRe.group(1)
153 if intervalSpace:
154 intervalSpace = convertGMK(intervalSpace)
155 if not intervalSpace:
156 printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
157 return None, None
158 else:
159 intervalSpace = spaceDefault
160 intervalInode = intervalRe.group(2)
161 if intervalInode:
162 intervalInode = convertGMK(intervalInode)
163 if not intervalInode:
164 printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
165 return None, None
166 else:
167 intervalInode = inodeDefault
168 return intervalSpace, intervalInode
169 else:
170 printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
171 return None, None
172
173class diskMonitor:
174
175 """Prepare the disk space monitor data"""
176
177 def __init__(self, configuration):
178
179 self.enableMonitor = False
180 self.configuration = configuration
181
182 BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
183 if BBDirs:
184 self.devDict = getDiskData(BBDirs, configuration)
185 if self.devDict:
186 self.spaceInterval, self.inodeInterval = getInterval(configuration)
187 if self.spaceInterval and self.inodeInterval:
188 self.enableMonitor = True
189 # These are for saving the previous disk free space and inode, we
190                # use them to avoid printing too many warning messages
191 self.preFreeS = {}
192 self.preFreeI = {}
193                # This is for STOPTASKS and ABORT, to avoid printing the message repeatedly
194                # while waiting for the tasks to finish
195 self.checked = {}
196 for k in self.devDict:
197 self.preFreeS[k] = 0
198 self.preFreeI[k] = 0
199 self.checked[k] = False
200 if self.spaceInterval is None and self.inodeInterval is None:
201 self.enableMonitor = False
202
203 def check(self, rq):
204
205 """ Take action for the monitor """
206
207 if self.enableMonitor:
208 for k in self.devDict:
209 path = os.path.dirname(k)
210 action = os.path.basename(k)
211 dev = self.devDict[k][0]
212 minSpace = self.devDict[k][1]
213 minInode = self.devDict[k][2]
214
215 st = os.statvfs(path)
216
217                # The free space, a floating point number
218 freeSpace = st.f_bavail * st.f_frsize
219
220 if minSpace and freeSpace < minSpace:
221 # Always show warning, the self.checked would always be False if the action is WARN
222 if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
223 logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
224 (path, dev, freeSpace / 1024 / 1024 / 1024.0))
225 self.preFreeS[k] = freeSpace
226
227 if action == "STOPTASKS" and not self.checked[k]:
228 logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
229 self.checked[k] = True
230 rq.finish_runqueue(False)
231 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
232 elif action == "ABORT" and not self.checked[k]:
233 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
234 self.checked[k] = True
235 rq.finish_runqueue(True)
236 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
237
238                # The free inodes, a floating point number
239 freeInode = st.f_favail
240
241 if minInode and freeInode < minInode:
242 # Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
243                    # zero; this is a feature of the fs, so we disable the inode
244 # checking for such a fs.
245 if st.f_files == 0:
246                        logger.info("Inode check for %s is unavailable, will remove it from disk monitor" % path)
247 self.devDict[k][2] = None
248 continue
249 # Always show warning, the self.checked would always be False if the action is WARN
250 if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
251 logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
252 (path, dev, freeInode / 1024.0))
253 self.preFreeI[k] = freeInode
254
255 if action == "STOPTASKS" and not self.checked[k]:
256 logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
257 self.checked[k] = True
258 rq.finish_runqueue(False)
259 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
260 elif action == "ABORT" and not self.checked[k]:
261 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
262 self.checked[k] = True
263 rq.finish_runqueue(True)
264 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
265 return
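
For illustration only (not part of the patch): how convertGMK() above interprets the unit suffixes used in BB_DISKMON_DIRS and BB_DISKMON_WARNINTERVAL, assuming bitbake's lib directory is on sys.path.

    from bb import monitordisk
    monitordisk.convertGMK("1G")     # 1073741824
    monitordisk.convertGMK("500M")   # 524288000
    monitordisk.convertGMK("100K")   # 102400
    monitordisk.convertGMK("4096")   # 4096 (no suffix, taken as-is)
    monitordisk.convertGMK("1.5G")   # None (fractional values are rejected)
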
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
new file mode 100644
index 0000000000..d79768db24
--- /dev/null
+++ b/bitbake/lib/bb/msg.py
@@ -0,0 +1,196 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'msg' implementation
5
6Message handling infrastructure for bitbake
7
8"""
9
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import sys
26import copy
27import logging
28import collections
29from itertools import groupby
30import warnings
31import bb
32import bb.event
33
34class BBLogFormatter(logging.Formatter):
35 """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
36
37 DEBUG3 = logging.DEBUG - 2
38 DEBUG2 = logging.DEBUG - 1
39 DEBUG = logging.DEBUG
40 VERBOSE = logging.INFO - 1
41 NOTE = logging.INFO
42 PLAIN = logging.INFO + 1
43 ERROR = logging.ERROR
44 WARNING = logging.WARNING
45 CRITICAL = logging.CRITICAL
46
47 levelnames = {
48 DEBUG3 : 'DEBUG',
49 DEBUG2 : 'DEBUG',
50 DEBUG : 'DEBUG',
51 VERBOSE: 'NOTE',
52 NOTE : 'NOTE',
53 PLAIN : '',
54 WARNING : 'WARNING',
55 ERROR : 'ERROR',
56 CRITICAL: 'ERROR',
57 }
58
59 color_enabled = False
60 BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
61
62 COLORS = {
63 DEBUG3 : CYAN,
64 DEBUG2 : CYAN,
65 DEBUG : CYAN,
66 VERBOSE : BASECOLOR,
67 NOTE : BASECOLOR,
68 PLAIN : BASECOLOR,
69 WARNING : YELLOW,
70 ERROR : RED,
71 CRITICAL: RED,
72 }
73
74 BLD = '\033[1;%dm'
75 STD = '\033[%dm'
76 RST = '\033[0m'
77
78 def getLevelName(self, levelno):
79 try:
80 return self.levelnames[levelno]
81 except KeyError:
82 self.levelnames[levelno] = value = 'Level %d' % levelno
83 return value
84
85 def format(self, record):
86 record.levelname = self.getLevelName(record.levelno)
87 if record.levelno == self.PLAIN:
88 msg = record.getMessage()
89 else:
90 if self.color_enabled:
91 record = self.colorize(record)
92 msg = logging.Formatter.format(self, record)
93
94 if hasattr(record, 'bb_exc_info'):
95 etype, value, tb = record.bb_exc_info
96 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
97 msg += '\n' + ''.join(formatted)
98 return msg
99
100 def colorize(self, record):
101 color = self.COLORS[record.levelno]
102 if self.color_enabled and color is not None:
103 record = copy.copy(record)
104 record.levelname = "".join([self.BLD % color, record.levelname, self.RST])
105 record.msg = "".join([self.STD % color, record.msg, self.RST])
106 return record
107
108 def enable_color(self):
109 self.color_enabled = True
110
111class BBLogFilter(object):
112 def __init__(self, handler, level, debug_domains):
113 self.stdlevel = level
114 self.debug_domains = debug_domains
115 loglevel = level
116 for domain in debug_domains:
117 if debug_domains[domain] < loglevel:
118 loglevel = debug_domains[domain]
119 handler.setLevel(loglevel)
120 handler.addFilter(self)
121
122 def filter(self, record):
123 if record.levelno >= self.stdlevel:
124 return True
125 if record.name in self.debug_domains and record.levelno >= self.debug_domains[record.name]:
126 return True
127 return False
128
129class BBLogFilterStdErr(BBLogFilter):
130 def filter(self, record):
131 if not BBLogFilter.filter(self, record):
132 return False
133 if record.levelno >= logging.ERROR:
134 return True
135 return False
136
137class BBLogFilterStdOut(BBLogFilter):
138 def filter(self, record):
139 if not BBLogFilter.filter(self, record):
140 return False
141 if record.levelno < logging.ERROR:
142 return True
143 return False
144
145# Message control functions
146#
147
148loggerDefaultDebugLevel = 0
149loggerDefaultVerbose = False
150loggerVerboseLogs = False
151loggerDefaultDomains = []
152
153def init_msgconfig(verbose, debug, debug_domains = []):
154 """
155    Set default verbosity and debug levels and configure the logger
156 """
157 bb.msg.loggerDefaultDebugLevel = debug
158 bb.msg.loggerDefaultVerbose = verbose
159 if verbose:
160 bb.msg.loggerVerboseLogs = True
161 bb.msg.loggerDefaultDomains = debug_domains
162
163def constructLogOptions():
164 debug = loggerDefaultDebugLevel
165 verbose = loggerDefaultVerbose
166 domains = loggerDefaultDomains
167
168 if debug:
169 level = BBLogFormatter.DEBUG - debug + 1
170 elif verbose:
171 level = BBLogFormatter.VERBOSE
172 else:
173 level = BBLogFormatter.NOTE
174
175 debug_domains = {}
176 for (domainarg, iterator) in groupby(domains):
177 dlevel = len(tuple(iterator))
178 debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
179 return level, debug_domains
180
181def addDefaultlogFilter(handler, cls = BBLogFilter):
182 level, debug_domains = constructLogOptions()
183
184 cls(handler, level, debug_domains)
185
186#
187# Message handling functions
188#
189
190def fatal(msgdomain, msg):
191 if msgdomain:
192 logger = logging.getLogger("BitBake.%s" % msgdomain)
193 else:
194 logger = logging.getLogger("BitBake")
195 logger.critical(msg)
196 sys.exit(1)
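
Illustration only (not part of the patch): how constructLogOptions() above turns a list of repeated debug domain names into per-logger levels, assuming the module defaults were first set via init_msgconfig().

    import logging
    import bb.msg
    bb.msg.init_msgconfig(False, 0, ["Fetcher", "Fetcher", "Parsing"])
    level, debug_domains = bb.msg.constructLogOptions()
    # level == bb.msg.BBLogFormatter.NOTE (no global debug/verbose requested)
    # debug_domains == {'BitBake.Fetcher': logging.DEBUG - 1, 'BitBake.Parsing': logging.DEBUG}
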
diff --git a/bitbake/lib/bb/namedtuple_with_abc.py b/bitbake/lib/bb/namedtuple_with_abc.py
new file mode 100644
index 0000000000..f5e0a3f3d5
--- /dev/null
+++ b/bitbake/lib/bb/namedtuple_with_abc.py
@@ -0,0 +1,255 @@
1# http://code.activestate.com/recipes/577629-namedtupleabc-abstract-base-class-mix-in-for-named/
2#!/usr/bin/env python
3# Copyright (c) 2011 Jan Kaliszewski (zuo). Available under the MIT License.
4
5"""
6namedtuple_with_abc.py:
7* named tuple mix-in + ABC (abstract base class) recipe,
8* works under Python 2.6, 2.7 as well as 3.x.
9
10Import this module to patch collections.namedtuple() factory function
11-- enriching it with the 'abc' attribute (an abstract base class + mix-in
12for named tuples) and decorating it with a wrapper that registers each
13newly created named tuple as a subclass of namedtuple.abc.
14
15How to import:
16 import collections, namedtuple_with_abc
17or:
18 import namedtuple_with_abc
19 from collections import namedtuple
20 # ^ in this variant you must import namedtuple function
21 # *after* importing namedtuple_with_abc module
22or simply:
23 from namedtuple_with_abc import namedtuple
24
25Simple usage example:
26 class Credentials(namedtuple.abc):
27 _fields = 'username password'
28 def __str__(self):
29 return ('{0.__class__.__name__}'
30 '(username={0.username}, password=...)'.format(self))
31 print(Credentials("alice", "Alice's password"))
32
33For more advanced examples -- see below the "if __name__ == '__main__':".
34"""
35
36import collections
37from abc import ABCMeta, abstractproperty
38from functools import wraps
39from sys import version_info
40
41__all__ = ('namedtuple',)
42_namedtuple = collections.namedtuple
43
44
45class _NamedTupleABCMeta(ABCMeta):
46 '''The metaclass for the abstract base class + mix-in for named tuples.'''
47 def __new__(mcls, name, bases, namespace):
48 fields = namespace.get('_fields')
49 for base in bases:
50 if fields is not None:
51 break
52 fields = getattr(base, '_fields', None)
53 if not isinstance(fields, abstractproperty):
54 basetuple = _namedtuple(name, fields)
55 bases = (basetuple,) + bases
56 namespace.pop('_fields', None)
57 namespace.setdefault('__doc__', basetuple.__doc__)
58 namespace.setdefault('__slots__', ())
59 return ABCMeta.__new__(mcls, name, bases, namespace)
60
61
62exec(
63 # Python 2.x metaclass declaration syntax
64 """class _NamedTupleABC(object):
65 '''The abstract base class + mix-in for named tuples.'''
66 __metaclass__ = _NamedTupleABCMeta
67 _fields = abstractproperty()""" if version_info[0] < 3 else
68 # Python 3.x metaclass declaration syntax
69 """class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
70 '''The abstract base class + mix-in for named tuples.'''
71 _fields = abstractproperty()"""
72)
73
74
75_namedtuple.abc = _NamedTupleABC
76#_NamedTupleABC.register(type(version_info)) # (and similar, in the future...)
77
78@wraps(_namedtuple)
79def namedtuple(*args, **kwargs):
80 '''Named tuple factory with namedtuple.abc subclass registration.'''
81 cls = _namedtuple(*args, **kwargs)
82 _NamedTupleABC.register(cls)
83 return cls
84
85collections.namedtuple = namedtuple
86
87
88
89
90if __name__ == '__main__':
91
92 '''Examples and explanations'''
93
94 # Simple usage
95
96 class MyRecord(namedtuple.abc):
97 _fields = 'x y z' # such form will be transformed into ('x', 'y', 'z')
98 def _my_custom_method(self):
99 return list(self._asdict().items())
100 # (the '_fields' attribute belongs to the named tuple public API anyway)
101
102 rec = MyRecord(1, 2, 3)
103 print(rec)
104 print(rec._my_custom_method())
105 print(rec._replace(y=222))
106 print(rec._replace(y=222)._my_custom_method())
107
108 # Custom abstract classes...
109
110 class MyAbstractRecord(namedtuple.abc):
111 def _my_custom_method(self):
112 return list(self._asdict().items())
113
114 try:
115 MyAbstractRecord() # (abstract classes cannot be instantiated)
116 except TypeError as exc:
117 print(exc)
118
119 class AnotherAbstractRecord(MyAbstractRecord):
120 def __str__(self):
121 return '<<<{0}>>>'.format(super(AnotherAbstractRecord,
122 self).__str__())
123
124 # ...and their non-abstract subclasses
125
126 class MyRecord2(MyAbstractRecord):
127 _fields = 'a, b'
128
129 class MyRecord3(AnotherAbstractRecord):
130 _fields = 'p', 'q', 'r'
131
132 rec2 = MyRecord2('foo', 'bar')
133 print(rec2)
134 print(rec2._my_custom_method())
135 print(rec2._replace(b=222))
136 print(rec2._replace(b=222)._my_custom_method())
137
138 rec3 = MyRecord3('foo', 'bar', 'baz')
139 print(rec3)
140 print(rec3._my_custom_method())
141 print(rec3._replace(q=222))
142 print(rec3._replace(q=222)._my_custom_method())
143
144 # You can also subclass non-abstract ones...
145
146 class MyRecord33(MyRecord3):
147 def __str__(self):
148 return '< {0!r}, ..., {0!r} >'.format(self.p, self.r)
149
150 rec33 = MyRecord33('foo', 'bar', 'baz')
151 print(rec33)
152 print(rec33._my_custom_method())
153 print(rec33._replace(q=222))
154 print(rec33._replace(q=222)._my_custom_method())
155
156 # ...and even override the magic '_fields' attribute again
157
158 class MyRecord345(MyRecord3):
159 _fields = 'e f g h i j k'
160
161 rec345 = MyRecord345(1, 2, 3, 4, 3, 2, 1)
162 print(rec345)
163 print(rec345._my_custom_method())
164 print(rec345._replace(f=222))
165 print(rec345._replace(f=222)._my_custom_method())
166
167 # Mixing-in some other classes is also possible:
168
169 class MyMixIn(object):
170 def method(self):
171 return "MyMixIn.method() called"
172 def _my_custom_method(self):
173 return "MyMixIn._my_custom_method() called"
174 def count(self, item):
175 return "MyMixIn.count({0}) called".format(item)
176 def _asdict(self): # (cannot override a namedtuple method, see below)
177 return "MyMixIn._asdict() called"
178
179 class MyRecord4(MyRecord33, MyMixIn): # mix-in on the right
180 _fields = 'j k l x'
181
182 class MyRecord5(MyMixIn, MyRecord33): # mix-in on the left
183 _fields = 'j k l x y'
184
185 rec4 = MyRecord4(1, 2, 3, 2)
186 print(rec4)
187 print(rec4.method())
188 print(rec4._my_custom_method()) # MyRecord33's
189 print(rec4.count(2)) # tuple's
190 print(rec4._replace(k=222))
191 print(rec4._replace(k=222).method())
192 print(rec4._replace(k=222)._my_custom_method()) # MyRecord33's
193 print(rec4._replace(k=222).count(8)) # tuple's
194
195 rec5 = MyRecord5(1, 2, 3, 2, 1)
196 print(rec5)
197 print(rec5.method())
198 print(rec5._my_custom_method()) # MyMixIn's
199 print(rec5.count(2)) # MyMixIn's
200 print(rec5._replace(k=222))
201 print(rec5._replace(k=222).method())
202 print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
203 print(rec5._replace(k=222).count(2)) # MyMixIn's
204
205    # Note that behavior: the standard namedtuple methods cannot be
206    # overridden by a foreign mix-in -- even if the mix-in is declared
207 # as the leftmost base class (but, obviously, you can override them
208 # in the defined class or its subclasses):
209
210 print(rec4._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
211 print(rec5._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
212
213 class MyRecord6(MyRecord33):
214 _fields = 'j k l x y z'
215 def _asdict(self):
216 return "MyRecord6._asdict() called"
217 rec6 = MyRecord6(1, 2, 3, 1, 2, 3)
218 print(rec6._asdict()) # (this returns "MyRecord6._asdict() called")
219
220 # All that record classes are real subclasses of namedtuple.abc:
221
222 assert issubclass(MyRecord, namedtuple.abc)
223 assert issubclass(MyAbstractRecord, namedtuple.abc)
224 assert issubclass(AnotherAbstractRecord, namedtuple.abc)
225 assert issubclass(MyRecord2, namedtuple.abc)
226 assert issubclass(MyRecord3, namedtuple.abc)
227 assert issubclass(MyRecord33, namedtuple.abc)
228 assert issubclass(MyRecord345, namedtuple.abc)
229 assert issubclass(MyRecord4, namedtuple.abc)
230 assert issubclass(MyRecord5, namedtuple.abc)
231 assert issubclass(MyRecord6, namedtuple.abc)
232
233 # ...but abstract ones are not subclasses of tuple
234 # (and this is what you probably want):
235
236 assert not issubclass(MyAbstractRecord, tuple)
237 assert not issubclass(AnotherAbstractRecord, tuple)
238
239 assert issubclass(MyRecord, tuple)
240 assert issubclass(MyRecord2, tuple)
241 assert issubclass(MyRecord3, tuple)
242 assert issubclass(MyRecord33, tuple)
243 assert issubclass(MyRecord345, tuple)
244 assert issubclass(MyRecord4, tuple)
245 assert issubclass(MyRecord5, tuple)
246 assert issubclass(MyRecord6, tuple)
247
248 # Named tuple classes created with namedtuple() factory function
249 # (in the "traditional" way) are registered as "virtual" subclasses
250 # of namedtuple.abc:
251
252 MyTuple = namedtuple('MyTuple', 'a b c')
253 mt = MyTuple(1, 2, 3)
254 assert issubclass(MyTuple, namedtuple.abc)
255 assert isinstance(mt, namedtuple.abc)
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
new file mode 100644
index 0000000000..e4a44dda11
--- /dev/null
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -0,0 +1,157 @@
1"""
2BitBake Parsers
3
4File parsers for the BitBake build tools.
5
6"""
7
8
9# Copyright (C) 2003, 2004 Chris Larson
10# Copyright (C) 2003, 2004 Phil Blundell
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27handlers = []
28
29import os
30import stat
31import logging
32import bb
33import bb.utils
34import bb.siggen
35
36logger = logging.getLogger("BitBake.Parsing")
37
38class ParseError(Exception):
39 """Exception raised when parsing fails"""
40 def __init__(self, msg, filename, lineno=0):
41 self.msg = msg
42 self.filename = filename
43 self.lineno = lineno
44 Exception.__init__(self, msg, filename, lineno)
45
46 def __str__(self):
47 if self.lineno:
48 return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
49 else:
50 return "ParseError in %s: %s" % (self.filename, self.msg)
51
52class SkipPackage(Exception):
53 """Exception raised to skip this package"""
54
55__mtime_cache = {}
56def cached_mtime(f):
57 if f not in __mtime_cache:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
59 return __mtime_cache[f]
60
61def cached_mtime_noerror(f):
62 if f not in __mtime_cache:
63 try:
64 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
65 except OSError:
66 return 0
67 return __mtime_cache[f]
68
69def update_mtime(f):
70 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
71 return __mtime_cache[f]
72
73def mark_dependency(d, f):
74 if f.startswith('./'):
75 f = "%s/%s" % (os.getcwd(), f[2:])
76 deps = (d.getVar('__depends') or [])
77 s = (f, cached_mtime_noerror(f))
78 if s not in deps:
79 deps.append(s)
80 d.setVar('__depends', deps)
81
82def check_dependency(d, f):
83 s = (f, cached_mtime_noerror(f))
84 deps = (d.getVar('__depends') or [])
85 return s in deps
86
87def supports(fn, data):
88 """Returns true if we have a handler for this file, false otherwise"""
89 for h in handlers:
90 if h['supports'](fn, data):
91 return 1
92 return 0
93
94def handle(fn, data, include = 0):
95 """Call the handler that is appropriate for this file"""
96 for h in handlers:
97 if h['supports'](fn, data):
98 with data.inchistory.include(fn):
99 return h['handle'](fn, data, include)
100 raise ParseError("not a BitBake file", fn)
101
102def init(fn, data):
103 for h in handlers:
104 if h['supports'](fn):
105 return h['init'](data)
106
107def init_parser(d):
108 bb.parse.siggen = bb.siggen.init(d)
109
110def resolve_file(fn, d):
111 if not os.path.isabs(fn):
112 bbpath = d.getVar("BBPATH", True)
113 newfn, attempts = bb.utils.which(bbpath, fn, history=True)
114 for af in attempts:
115 mark_dependency(d, af)
116 if not newfn:
117 raise IOError("file %s not found in %s" % (fn, bbpath))
118 fn = newfn
119
120 mark_dependency(d, fn)
121 if not os.path.isfile(fn):
122 raise IOError("file %s not found" % fn)
123
124 logger.debug(2, "LOAD %s", fn)
125 return fn
126
127# Used by OpenEmbedded metadata
128__pkgsplit_cache__={}
129def vars_from_file(mypkg, d):
130 if not mypkg or not mypkg.endswith((".bb", ".bbappend")):
131 return (None, None, None)
132 if mypkg in __pkgsplit_cache__:
133 return __pkgsplit_cache__[mypkg]
134
135 myfile = os.path.splitext(os.path.basename(mypkg))
136 parts = myfile[0].split('_')
137 __pkgsplit_cache__[mypkg] = parts
138 if len(parts) > 3:
139 raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
140 exp = 3 - len(parts)
141 tmplist = []
142 while exp != 0:
143 exp -= 1
144 tmplist.append(None)
145 parts.extend(tmplist)
146 return parts
147
148def get_file_depends(d):
149 '''Return the dependent files'''
150 dep_files = []
151 depends = d.getVar('__base_depends', True) or []
152 depends = depends + (d.getVar('__depends', True) or [])
153 for (fn, _) in depends:
154 dep_files.append(os.path.abspath(fn))
155 return " ".join(dep_files)
156
157from bb.parse.parse_py import __version__, ConfHandler, BBHandler
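
For illustration only (not part of the patch): what vars_from_file() above returns for a few hypothetical recipe file names (package name, version and revision, padded with None for missing parts); the datastore argument is not consulted for the split itself.

    from bb import parse
    parse.vars_from_file("/recipes/foo_1.0_r0.bb", None)    # ['foo', '1.0', 'r0']
    parse.vars_from_file("/recipes/bar_2.4.bb", None)       # ['bar', '2.4', None]
    parse.vars_from_file("/recipes/baz.bb", None)            # ['baz', None, None]
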
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
new file mode 100644
index 0000000000..d8c141b37c
--- /dev/null
+++ b/bitbake/lib/bb/parse/ast.py
@@ -0,0 +1,478 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4 AbstractSyntaxTree classes for the Bitbake language
5"""
6
7# Copyright (C) 2003, 2004 Chris Larson
8# Copyright (C) 2003, 2004 Phil Blundell
9# Copyright (C) 2009 Holger Hans Peter Freyther
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24from __future__ import absolute_import
25from future_builtins import filter
26import re
27import string
28import logging
29import bb
30import itertools
31from bb import methodpool
32from bb.parse import logger
33
34_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
35
36class StatementGroup(list):
37 def eval(self, data):
38 for statement in self:
39 statement.eval(data)
40
41class AstNode(object):
42 def __init__(self, filename, lineno):
43 self.filename = filename
44 self.lineno = lineno
45
46class IncludeNode(AstNode):
47 def __init__(self, filename, lineno, what_file, force):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50 self.force = force
51
52 def eval(self, data):
53 """
54 Include the file and evaluate the statements
55 """
56 s = data.expand(self.what_file)
57 logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
58
59 # TODO: Cache those includes... maybe not here though
60 if self.force:
61 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
62 else:
63 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
64
65class ExportNode(AstNode):
66 def __init__(self, filename, lineno, var):
67 AstNode.__init__(self, filename, lineno)
68 self.var = var
69
70 def eval(self, data):
71 data.setVarFlag(self.var, "export", 1, op = 'exported')
72
73class DataNode(AstNode):
74 """
75 Various data related updates. For the sake of sanity
76    we have one class doing all this. This means that all of
77    this needs to be re-evaluated... we might be able to do
78 that faster with multiple classes.
79 """
80 def __init__(self, filename, lineno, groupd):
81 AstNode.__init__(self, filename, lineno)
82 self.groupd = groupd
83
84 def getFunc(self, key, data):
85 if 'flag' in self.groupd and self.groupd['flag'] != None:
86 return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
87 else:
88 return data.getVar(key, noweakdefault=True)
89
90 def eval(self, data):
91 groupd = self.groupd
92 key = groupd["var"]
93 loginfo = {
94 'variable': key,
95 'file': self.filename,
96 'line': self.lineno,
97 }
98 if "exp" in groupd and groupd["exp"] != None:
99 data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)
100
101 op = "set"
102 if "ques" in groupd and groupd["ques"] != None:
103 val = self.getFunc(key, data)
104 op = "set?"
105 if val == None:
106 val = groupd["value"]
107 elif "colon" in groupd and groupd["colon"] != None:
108 e = data.createCopy()
109 bb.data.update_data(e)
110 op = "immediate"
111 val = e.expand(groupd["value"], key + "[:=]")
112 elif "append" in groupd and groupd["append"] != None:
113 op = "append"
114 val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
115 elif "prepend" in groupd and groupd["prepend"] != None:
116 op = "prepend"
117 val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
118 elif "postdot" in groupd and groupd["postdot"] != None:
119 op = "postdot"
120 val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
121 elif "predot" in groupd and groupd["predot"] != None:
122 op = "predot"
123 val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
124 else:
125 val = groupd["value"]
126
127 flag = None
128 if 'flag' in groupd and groupd['flag'] != None:
129 flag = groupd['flag']
130 elif groupd["lazyques"]:
131 flag = "defaultval"
132
133 loginfo['op'] = op
134 loginfo['detail'] = groupd["value"]
135
136 if flag:
137 data.setVarFlag(key, flag, val, **loginfo)
138 else:
139 data.setVar(key, val, **loginfo)
140
141class MethodNode(AstNode):
142 tr_tbl = string.maketrans('/.+-@%', '______')
143
144 def __init__(self, filename, lineno, func_name, body):
145 AstNode.__init__(self, filename, lineno)
146 self.func_name = func_name
147 self.body = body
148
149 def eval(self, data):
150 text = '\n'.join(self.body)
151 if self.func_name == "__anonymous":
152 funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
153 text = "def %s(d):\n" % (funcname) + text
154 bb.methodpool.insert_method(funcname, text, self.filename)
155 anonfuncs = data.getVar('__BBANONFUNCS') or []
156 anonfuncs.append(funcname)
157 data.setVar('__BBANONFUNCS', anonfuncs)
158 data.setVar(funcname, text)
159 else:
160 data.setVarFlag(self.func_name, "func", 1)
161 data.setVar(self.func_name, text)
162
163class PythonMethodNode(AstNode):
164 def __init__(self, filename, lineno, function, modulename, body):
165 AstNode.__init__(self, filename, lineno)
166 self.function = function
167 self.modulename = modulename
168 self.body = body
169
170 def eval(self, data):
171        # Note we will add root to parsedmethods after having parsed
172 # 'this' file. This means we will not parse methods from
173 # bb classes twice
174 text = '\n'.join(self.body)
175 bb.methodpool.insert_method(self.modulename, text, self.filename)
176 data.setVarFlag(self.function, "func", 1)
177 data.setVarFlag(self.function, "python", 1)
178 data.setVar(self.function, text)
179
180class MethodFlagsNode(AstNode):
181 def __init__(self, filename, lineno, key, m):
182 AstNode.__init__(self, filename, lineno)
183 self.key = key
184 self.m = m
185
186 def eval(self, data):
187 if data.getVar(self.key):
188 # clean up old version of this piece of metadata, as its
189 # flags could cause problems
190 data.setVarFlag(self.key, 'python', None)
191 data.setVarFlag(self.key, 'fakeroot', None)
192 if self.m.group("py") is not None:
193 data.setVarFlag(self.key, "python", "1")
194 else:
195 data.delVarFlag(self.key, "python")
196 if self.m.group("fr") is not None:
197 data.setVarFlag(self.key, "fakeroot", "1")
198 else:
199 data.delVarFlag(self.key, "fakeroot")
200
201class ExportFuncsNode(AstNode):
202 def __init__(self, filename, lineno, fns, classname):
203 AstNode.__init__(self, filename, lineno)
204 self.n = fns.split()
205 self.classname = classname
206
207 def eval(self, data):
208
209 for func in self.n:
210 calledfunc = self.classname + "_" + func
211
212 if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
213 continue
214
215 if data.getVar(func):
216 data.setVarFlag(func, 'python', None)
217 data.setVarFlag(func, 'func', None)
218
219 for flag in [ "func", "python" ]:
220 if data.getVarFlag(calledfunc, flag):
221 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag))
222 for flag in [ "dirs" ]:
223 if data.getVarFlag(func, flag):
224 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))
225
226 if data.getVarFlag(calledfunc, "python"):
227 data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n")
228 else:
229 data.setVar(func, " " + calledfunc + "\n")
230 data.setVarFlag(func, 'export_func', '1')
231
232class AddTaskNode(AstNode):
233 def __init__(self, filename, lineno, func, before, after):
234 AstNode.__init__(self, filename, lineno)
235 self.func = func
236 self.before = before
237 self.after = after
238
239 def eval(self, data):
240 bb.build.addtask(self.func, self.before, self.after, data)
241
242class DelTaskNode(AstNode):
243 def __init__(self, filename, lineno, func):
244 AstNode.__init__(self, filename, lineno)
245 self.func = func
246
247 def eval(self, data):
248 bb.build.deltask(self.func, data)
249
250class BBHandlerNode(AstNode):
251 def __init__(self, filename, lineno, fns):
252 AstNode.__init__(self, filename, lineno)
253 self.hs = fns.split()
254
255 def eval(self, data):
256 bbhands = data.getVar('__BBHANDLERS') or []
257 for h in self.hs:
258 bbhands.append(h)
259 data.setVarFlag(h, "handler", 1)
260 data.setVar('__BBHANDLERS', bbhands)
261
262class InheritNode(AstNode):
263 def __init__(self, filename, lineno, classes):
264 AstNode.__init__(self, filename, lineno)
265 self.classes = classes
266
267 def eval(self, data):
268 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
269
270def handleInclude(statements, filename, lineno, m, force):
271 statements.append(IncludeNode(filename, lineno, m.group(1), force))
272
273def handleExport(statements, filename, lineno, m):
274 statements.append(ExportNode(filename, lineno, m.group(1)))
275
276def handleData(statements, filename, lineno, groupd):
277 statements.append(DataNode(filename, lineno, groupd))
278
279def handleMethod(statements, filename, lineno, func_name, body):
280 statements.append(MethodNode(filename, lineno, func_name, body))
281
282def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
283 statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))
284
285def handleMethodFlags(statements, filename, lineno, key, m):
286 statements.append(MethodFlagsNode(filename, lineno, key, m))
287
288def handleExportFuncs(statements, filename, lineno, m, classname):
289 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
290
291def handleAddTask(statements, filename, lineno, m):
292 func = m.group("func")
293 before = m.group("before")
294 after = m.group("after")
295 if func is None:
296 return
297
298 statements.append(AddTaskNode(filename, lineno, func, before, after))
299
300def handleDelTask(statements, filename, lineno, m):
301 func = m.group("func")
302 if func is None:
303 return
304
305 statements.append(DelTaskNode(filename, lineno, func))
306
307def handleBBHandlers(statements, filename, lineno, m):
308 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
309
310def handleInherit(statements, filename, lineno, m):
311 classes = m.group(1)
312 statements.append(InheritNode(filename, lineno, classes))
313
314def finalize(fn, d, variant = None):
315 all_handlers = {}
316 for var in d.getVar('__BBHANDLERS') or []:
317 # try to add the handler
318 bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())
319
320 bb.event.fire(bb.event.RecipePreFinalise(fn), d)
321
322 bb.data.expandKeys(d)
323 bb.data.update_data(d)
324 code = []
325 for funcname in d.getVar("__BBANONFUNCS") or []:
326 code.append("%s(d)" % funcname)
327 bb.utils.better_exec("\n".join(code), {"d": d})
328 bb.data.update_data(d)
329
330 tasklist = d.getVar('__BBTASKS') or []
331 deltasklist = d.getVar('__BBDELTASKS') or []
332 bb.build.add_tasks(tasklist, deltasklist, d)
333
334 bb.parse.siggen.finalise(fn, d, variant)
335
336 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
337
338 bb.event.fire(bb.event.RecipeParsed(fn), d)
339
340def _create_variants(datastores, names, function):
341 def create_variant(name, orig_d, arg = None):
342 new_d = bb.data.createCopy(orig_d)
343 function(arg or name, new_d)
344 datastores[name] = new_d
345
346 for variant, variant_d in datastores.items():
347 for name in names:
348 if not variant:
349 # Based on main recipe
350 create_variant(name, variant_d)
351 else:
352 create_variant("%s-%s" % (variant, name), variant_d, name)
353
354def _expand_versions(versions):
355 def expand_one(version, start, end):
356 for i in xrange(start, end + 1):
357 ver = _bbversions_re.sub(str(i), version, 1)
358 yield ver
359
360 versions = iter(versions)
361 while True:
362 try:
363 version = next(versions)
364 except StopIteration:
365 break
366
367 range_ver = _bbversions_re.search(version)
368 if not range_ver:
369 yield version
370 else:
371 newversions = expand_one(version, int(range_ver.group("from")),
372 int(range_ver.group("to")))
373 versions = itertools.chain(newversions, versions)
374
375def multi_finalize(fn, d):
376 appends = (d.getVar("__BBAPPEND", True) or "").split()
377 for append in appends:
378 logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
379 bb.parse.BBHandler.handle(append, d, True)
380
381 onlyfinalise = d.getVar("__ONLYFINALISE", False)
382
383 safe_d = d
384 d = bb.data.createCopy(safe_d)
385 try:
386 finalize(fn, d)
387 except bb.parse.SkipPackage as e:
388 d.setVar("__SKIPPED", e.args[0])
389 datastores = {"": safe_d}
390
391 versions = (d.getVar("BBVERSIONS", True) or "").split()
392 if versions:
393 pv = orig_pv = d.getVar("PV", True)
394 baseversions = {}
395
396 def verfunc(ver, d, pv_d = None):
397 if pv_d is None:
398 pv_d = d
399
400 overrides = d.getVar("OVERRIDES", True).split(":")
401 pv_d.setVar("PV", ver)
402 overrides.append(ver)
403 bpv = baseversions.get(ver) or orig_pv
404 pv_d.setVar("BPV", bpv)
405 overrides.append(bpv)
406 d.setVar("OVERRIDES", ":".join(overrides))
407
408 versions = list(_expand_versions(versions))
409 for pos, version in enumerate(list(versions)):
410 try:
411 pv, bpv = version.split(":", 2)
412 except ValueError:
413 pass
414 else:
415 versions[pos] = pv
416 baseversions[pv] = bpv
417
418 if pv in versions and not baseversions.get(pv):
419 versions.remove(pv)
420 else:
421 pv = versions.pop()
422
423 # This is necessary because our existing main datastore
424 # has already been finalized with the old PV, we need one
425 # that's been finalized with the new PV.
426 d = bb.data.createCopy(safe_d)
427 verfunc(pv, d, safe_d)
428 try:
429 finalize(fn, d)
430 except bb.parse.SkipPackage as e:
431 d.setVar("__SKIPPED", e.args[0])
432
433 _create_variants(datastores, versions, verfunc)
434
435 extended = d.getVar("BBCLASSEXTEND", True) or ""
436 if extended:
437        # the following is to support bbextends with arguments, e.g. multilib
438        # an example is as follows:
439        #   BBCLASSEXTEND = "multilib:lib32"
440        # it will create foo-lib32, inheriting multilib.bbclass and setting
441        # BBEXTENDCURR to "multilib" and BBEXTENDVARIANT to "lib32"
442 extendedmap = {}
443 variantmap = {}
444
445 for ext in extended.split():
446 eext = ext.split(':', 2)
447 if len(eext) > 1:
448 extendedmap[ext] = eext[0]
449 variantmap[ext] = eext[1]
450 else:
451 extendedmap[ext] = ext
452
453 pn = d.getVar("PN", True)
454 def extendfunc(name, d):
455 if name != extendedmap[name]:
456 d.setVar("BBEXTENDCURR", extendedmap[name])
457 d.setVar("BBEXTENDVARIANT", variantmap[name])
458 else:
459 d.setVar("PN", "%s-%s" % (pn, name))
460 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)
461
462 safe_d.setVar("BBCLASSEXTEND", extended)
463 _create_variants(datastores, extendedmap.keys(), extendfunc)
464
465 for variant, variant_d in datastores.iteritems():
466 if variant:
467 try:
468 if not onlyfinalise or variant in onlyfinalise:
469 finalize(fn, variant_d, variant)
470 except bb.parse.SkipPackage as e:
471 variant_d.setVar("__SKIPPED", e.args[0])
472
473 if len(datastores) > 1:
474 variants = filter(None, datastores.iterkeys())
475 safe_d.setVar("__VARIANTS", " ".join(variants))
476
477 datastores[""] = d
478 return datastores
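
The version-range expansion in _expand_versions above is easier to follow with a small standalone sketch. The bracketed range pattern assumed for _bbversions_re below is an illustration only; the real pattern is defined earlier in ast.py and is not shown in this diff.

    import itertools
    import re

    # Assumed pattern for the illustration: "1.[0-2]" expands to 1.0, 1.1, 1.2.
    _bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")

    def expand_versions(versions):
        def expand_one(version, start, end):
            for i in range(start, end + 1):
                yield _bbversions_re.sub(str(i), version, 1)

        versions = iter(versions)
        while True:
            try:
                version = next(versions)
            except StopIteration:
                break
            m = _bbversions_re.search(version)
            if not m:
                yield version
            else:
                # Push the expansions back onto the front of the stream so that
                # nested ranges such as "1.[0-1].[2-3]" are expanded as well.
                versions = itertools.chain(
                    expand_one(version, int(m.group("from")), int(m.group("to"))),
                    versions)

    # list(expand_versions(["1.[0-2]", "2.0"])) == ["1.0", "1.1", "1.2", "2.0"]
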
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
new file mode 100644
index 0000000000..408890e48a
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -0,0 +1,267 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling .bb files
6
7 Reads a .bb file and obtains its metadata
8
9"""
10
11
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14#
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28from __future__ import absolute_import
29import re, bb, os
30import logging
31import bb.build, bb.utils
32from bb import data
33
34from . import ConfHandler
35from .. import resolve_file, ast, logger
36from .ConfHandler import include, init
37
38# For compatibility
39bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
40
41__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
42__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
43__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
44__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
45__deltask_regexp__ = re.compile("deltask\s+(?P<func>\w+)")
46__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
47__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
48__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
49
50
51__infunc__ = ""
52__inpython__ = False
53__body__ = []
54__classname__ = ""
55
56cached_statements = {}
57
58# We need to indicate EOF to the feeder. This code is so messy that
59# factoring it out into a close_parse_file method is out of the question.
60# We will use IN_PYTHON_EOF as an indicator to just close the method
61#
62# The two parts using it are tightly integrated anyway
63IN_PYTHON_EOF = -9999999999999
64
65
66
67def supports(fn, d):
68 """Return True if fn has a supported extension"""
69 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
70
71def inherit(files, fn, lineno, d):
72 __inherit_cache = d.getVar('__inherit_cache') or []
73 files = d.expand(files).split()
74 for file in files:
75 if not os.path.isabs(file) and not file.endswith(".bbclass"):
76 file = os.path.join('classes', '%s.bbclass' % file)
77
78 if not os.path.isabs(file):
79 dname = os.path.dirname(fn)
80 bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
81 abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
82 for af in attempts:
83 if af != abs_fn:
84 bb.parse.mark_dependency(d, af)
85 if abs_fn:
86 file = abs_fn
87
88 if not file in __inherit_cache:
89 logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
90 __inherit_cache.append( file )
91 d.setVar('__inherit_cache', __inherit_cache)
92 include(fn, file, lineno, d, "inherit")
93 __inherit_cache = d.getVar('__inherit_cache') or []
94
95def get_statements(filename, absolute_filename, base_name):
96 global cached_statements
97
98 try:
99 return cached_statements[absolute_filename]
100 except KeyError:
101 file = open(absolute_filename, 'r')
102 statements = ast.StatementGroup()
103
104 lineno = 0
105 while True:
106 lineno = lineno + 1
107 s = file.readline()
108 if not s: break
109 s = s.rstrip()
110 feeder(lineno, s, filename, base_name, statements)
111 file.close()
112 if __inpython__:
113 # add a blank line to close out any python definition
114 feeder(IN_PYTHON_EOF, "", filename, base_name, statements)
115
116 if filename.endswith(".bbclass") or filename.endswith(".inc"):
117 cached_statements[absolute_filename] = statements
118 return statements
119
120def handle(fn, d, include):
121 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
122 __body__ = []
123 __infunc__ = ""
124 __classname__ = ""
125 __residue__ = []
126
127
128 if include == 0:
129 logger.debug(2, "BB %s: handle(data)", fn)
130 else:
131 logger.debug(2, "BB %s: handle(data, include)", fn)
132
133 base_name = os.path.basename(fn)
134 (root, ext) = os.path.splitext(base_name)
135 init(d)
136
137 if ext == ".bbclass":
138 __classname__ = root
139 __inherit_cache = d.getVar('__inherit_cache') or []
140 if not fn in __inherit_cache:
141 __inherit_cache.append(fn)
142 d.setVar('__inherit_cache', __inherit_cache)
143
144 if include != 0:
145 oldfile = d.getVar('FILE')
146 else:
147 oldfile = None
148
149 abs_fn = resolve_file(fn, d)
150
151 if include:
152 bb.parse.mark_dependency(d, abs_fn)
153
154 # actual loading
155 statements = get_statements(fn, abs_fn, base_name)
156
157 # DONE WITH PARSING... time to evaluate
158 if ext != ".bbclass":
159 d.setVar('FILE', abs_fn)
160
161 try:
162 statements.eval(d)
163 except bb.parse.SkipPackage:
164 bb.data.setVar("__SKIPPED", True, d)
165 if include == 0:
166 return { "" : d }
167
168 if ext != ".bbclass" and include == 0:
169 return ast.multi_finalize(fn, d)
170
171 if oldfile:
172 d.setVar("FILE", oldfile)
173
174 return d
175
176def feeder(lineno, s, fn, root, statements):
177 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
178 if __infunc__:
179 if s == '}':
180 __body__.append('')
181 ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
182 __infunc__ = ""
183 __body__ = []
184 else:
185 __body__.append(s)
186 return
187
188 if __inpython__:
189 m = __python_func_regexp__.match(s)
190 if m and lineno != IN_PYTHON_EOF:
191 __body__.append(s)
192 return
193 else:
194 ast.handlePythonMethod(statements, fn, lineno, __inpython__,
195 root, __body__)
196 __body__ = []
197 __inpython__ = False
198
199 if lineno == IN_PYTHON_EOF:
200 return
201
202 if s and s[0] == '#':
203 if len(__residue__) != 0 and __residue__[0][0] != "#":
204 bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
205
206 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
207 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
208
209 if s and s[-1] == '\\':
210 __residue__.append(s[:-1])
211 return
212
213 s = "".join(__residue__) + s
214 __residue__ = []
215
216 # Skip empty lines
217 if s == '':
218 return
219
220 # Skip comments
221 if s[0] == '#':
222 return
223
224 m = __func_start_regexp__.match(s)
225 if m:
226 __infunc__ = m.group("func") or "__anonymous"
227 ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
228 return
229
230 m = __def_regexp__.match(s)
231 if m:
232 __body__.append(s)
233 __inpython__ = m.group(1)
234
235 return
236
237 m = __export_func_regexp__.match(s)
238 if m:
239 ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
240 return
241
242 m = __addtask_regexp__.match(s)
243 if m:
244 ast.handleAddTask(statements, fn, lineno, m)
245 return
246
247 m = __deltask_regexp__.match(s)
248 if m:
249 ast.handleDelTask(statements, fn, lineno, m)
250 return
251
252 m = __addhandler_regexp__.match(s)
253 if m:
254 ast.handleBBHandlers(statements, fn, lineno, m)
255 return
256
257 m = __inherit_regexp__.match(s)
258 if m:
259 ast.handleInherit(statements, fn, lineno, m)
260 return
261
262 return ConfHandler.feeder(lineno, s, fn, statements)
263
264# Add us to the handlers list
265from .. import handlers
266handlers.append({'supports': supports, 'handle': handle, 'init': init})
267del handlers
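
To make the addtask grammar concrete, here is a short sketch of what __addtask_regexp__ above captures; the addtask line itself is an invented example.

    import re

    # Same pattern as __addtask_regexp__ above, written as a raw string.
    addtask_re = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")

    m = addtask_re.match("addtask do_deploy after do_install before do_build")
    # m.group("func")   == "do_deploy"
    # m.group("after")  == "do_install "   (note the trailing space)
    # m.group("before") == "do_build"
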
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
new file mode 100644
index 0000000000..978ebe4608
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -0,0 +1,189 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling configuration data files
6
7 Reads a .conf file and obtains its metadata
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26
27import re, os
28import logging
29import bb.utils
30from bb.parse import ParseError, resolve_file, ast, logger
31
32__config_regexp__ = re.compile( r"""
33 ^
34 (?P<exp>export\s*)?
35 (?P<var>[a-zA-Z0-9\-~_+.${}/]+?)
36 (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
37
38 \s* (
39 (?P<colon>:=) |
40 (?P<lazyques>\?\?=) |
41 (?P<ques>\?=) |
42 (?P<append>\+=) |
43 (?P<prepend>=\+) |
44 (?P<predot>=\.) |
45 (?P<postdot>\.=) |
46 =
47 ) \s*
48
49 (?!'[^']*'[^']*'$)
50 (?!\"[^\"]*\"[^\"]*\"$)
51 (?P<apo>['\"])
52 (?P<value>.*)
53 (?P=apo)
54 $
55 """, re.X)
56__include_regexp__ = re.compile( r"include\s+(.+)" )
57__require_regexp__ = re.compile( r"require\s+(.+)" )
58__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
59
60def init(data):
61 topdir = data.getVar('TOPDIR')
62 if not topdir:
63 data.setVar('TOPDIR', os.getcwd())
64
65
66def supports(fn, d):
67 return fn[-5:] == ".conf"
68
69def include(oldfn, fn, lineno, data, error_out):
70 """
71 error_out: A string indicating the verb (e.g. "include", "inherit") to be
72 used in a ParseError that will be raised if the file to be included could
73 not be included. Specify False to avoid raising an error in this case.
74 """
75 if oldfn == fn: # prevent infinite recursion
76 return None
77
78 import bb
79 fn = data.expand(fn)
80 oldfn = data.expand(oldfn)
81
82 if not os.path.isabs(fn):
83 dname = os.path.dirname(oldfn)
84 bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
85 abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
86 if abs_fn and bb.parse.check_dependency(data, abs_fn):
87 bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
88 for af in attempts:
89 bb.parse.mark_dependency(data, af)
90 if abs_fn:
91 fn = abs_fn
92 elif bb.parse.check_dependency(data, fn):
93 bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
94
95 from bb.parse import handle
96 try:
97 ret = handle(fn, data, True)
98 except (IOError, OSError):
99 if error_out:
100 raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
101 logger.debug(2, "CONF file '%s' not found", fn)
102 bb.parse.mark_dependency(data, fn)
103
104# We have an issue where a UI might want to enforce particular settings such as
105# an empty DISTRO variable. If configuration files do something like assigning
106# a weak default, it turns out to be very difficult to filter out these changes,
107# particularly when the weak default might appear halfway through parsing a chain
108# of configuration files. We therefore let the UIs hook into configuration file
109# parsing. This turns out to be a hard problem to solve any other way.
110confFilters = []
111
112def handle(fn, data, include):
113 init(data)
114
115 if include == 0:
116 oldfile = None
117 else:
118 oldfile = data.getVar('FILE')
119
120 abs_fn = resolve_file(fn, data)
121 f = open(abs_fn, 'r')
122
123 if include:
124 bb.parse.mark_dependency(data, abs_fn)
125
126 statements = ast.StatementGroup()
127 lineno = 0
128 while True:
129 lineno = lineno + 1
130 s = f.readline()
131 if not s:
132 break
133 w = s.strip()
134 # skip empty lines
135 if not w:
136 continue
137 s = s.rstrip()
138 while s[-1] == '\\':
139 s2 = f.readline().strip()
140 lineno = lineno + 1
141 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
142 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
143 s = s[:-1] + s2
144 # skip comments
145 if s[0] == '#':
146 continue
147 feeder(lineno, s, abs_fn, statements)
148
149 # DONE WITH PARSING... time to evaluate
150 data.setVar('FILE', abs_fn)
151 statements.eval(data)
152 if oldfile:
153 data.setVar('FILE', oldfile)
154
155 f.close()
156
157 for f in confFilters:
158 f(fn, data)
159
160 return data
161
162def feeder(lineno, s, fn, statements):
163 m = __config_regexp__.match(s)
164 if m:
165 groupd = m.groupdict()
166 ast.handleData(statements, fn, lineno, groupd)
167 return
168
169 m = __include_regexp__.match(s)
170 if m:
171 ast.handleInclude(statements, fn, lineno, m, False)
172 return
173
174 m = __require_regexp__.match(s)
175 if m:
176 ast.handleInclude(statements, fn, lineno, m, True)
177 return
178
179 m = __export_regexp__.match(s)
180 if m:
181 ast.handleExport(statements, fn, lineno, m)
182 return
183
184 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
185
186# Add us to the handlers list
187from bb.parse import handlers
188handlers.append({'supports': supports, 'handle': handle, 'init': init})
189del handlers
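
The confFilters hook described in the comment above lets UIs post-process each parsed configuration file; a minimal registration might look like the sketch below (the filter function and the DISTRO policy are hypothetical and invented for illustration).

    from bb.parse.parse_py import ConfHandler

    def force_empty_distro(fn, data):
        # Hypothetical UI policy: override whatever the configuration files set.
        data.setVar("DISTRO", "")

    # Each registered filter is called as f(fn, data) at the end of handle().
    ConfHandler.confFilters.append(force_empty_distro)
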
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py
new file mode 100644
index 0000000000..3e658d0de9
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/__init__.py
@@ -0,0 +1,33 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake Parsers
6
7File parsers for the BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26#
27# Based on functions from the base bb module, Copyright 2003 Holger Schurig
28
29from __future__ import absolute_import
30from . import ConfHandler
31from . import BBHandler
32
33__version__ = '1.0'
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
new file mode 100644
index 0000000000..994e61b0a6
--- /dev/null
+++ b/bitbake/lib/bb/persist_data.py
@@ -0,0 +1,215 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access them at some future date. Acts as a convenience wrapper around sqlite,
5currently, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import collections
25import logging
26import os.path
27import sys
28import warnings
29from bb.compat import total_ordering
30from collections import Mapping
31
32try:
33 import sqlite3
34except ImportError:
35 from pysqlite2 import dbapi2 as sqlite3
36
37sqlversion = sqlite3.sqlite_version_info
38if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
39 raise Exception("sqlite3 version 3.3.0 or later is required.")
40
41
42logger = logging.getLogger("BitBake.PersistData")
43if hasattr(sqlite3, 'enable_shared_cache'):
44 try:
45 sqlite3.enable_shared_cache(True)
46 except sqlite3.OperationalError:
47 pass
48
49
50@total_ordering
51class SQLTable(collections.MutableMapping):
52 """Object representing a table/domain in the database"""
53 def __init__(self, cachefile, table):
54 self.cachefile = cachefile
55 self.table = table
56 self.cursor = connect(self.cachefile)
57
58 self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
59 % table)
60
61 def _execute(self, *query):
62 """Execute a query, waiting to acquire a lock if necessary"""
63 count = 0
64 while True:
65 try:
66 return self.cursor.execute(*query)
67 except sqlite3.OperationalError as exc:
68 if 'database is locked' in str(exc) and count < 500:
69 count = count + 1
70 self.cursor.close()
71 self.cursor = connect(self.cachefile)
72 continue
73 raise
74
75 def __enter__(self):
76 self.cursor.__enter__()
77 return self
78
79 def __exit__(self, *excinfo):
80 self.cursor.__exit__(*excinfo)
81
82 def __getitem__(self, key):
83 data = self._execute("SELECT * from %s where key=?;" %
84 self.table, [key])
85 for row in data:
86 return row[1]
87 raise KeyError(key)
88
89 def __delitem__(self, key):
90 if key not in self:
91 raise KeyError(key)
92 self._execute("DELETE from %s where key=?;" % self.table, [key])
93
94 def __setitem__(self, key, value):
95 if not isinstance(key, basestring):
96 raise TypeError('Only string keys are supported')
97 elif not isinstance(value, basestring):
98 raise TypeError('Only string values are supported')
99
100 data = self._execute("SELECT * from %s where key=?;" %
101 self.table, [key])
102 exists = len(list(data))
103 if exists:
104 self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
105 [value, key])
106 else:
107 self._execute("INSERT into %s(key, value) values (?, ?);" %
108 self.table, [key, value])
109
110 def __contains__(self, key):
111 return key in set(self)
112
113 def __len__(self):
114 data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
115 for row in data:
116 return row[0]
117
118 def __iter__(self):
119 data = self._execute("SELECT key FROM %s;" % self.table)
120 return (row[0] for row in data)
121
122 def __lt__(self, other):
123 if not isinstance(other, Mapping):
124 raise NotImplemented
125
126 return len(self) < len(other)
127
128 def get_by_pattern(self, pattern):
129 data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
130 self.table, [pattern])
131 return [row[1] for row in data]
132
133 def values(self):
134 return list(self.itervalues())
135
136 def itervalues(self):
137 data = self._execute("SELECT value FROM %s;" % self.table)
138 return (row[0] for row in data)
139
140 def items(self):
141 return list(self.iteritems())
142
143 def iteritems(self):
144 return self._execute("SELECT * FROM %s;" % self.table)
145
146 def clear(self):
147 self._execute("DELETE FROM %s;" % self.table)
148
149 def has_key(self, key):
150 return key in self
151
152
153class PersistData(object):
154 """Deprecated representation of the bitbake persistent data store"""
155 def __init__(self, d):
156 warnings.warn("Use of PersistData is deprecated. Please use "
157 "persist(domain, d) instead.",
158 category=DeprecationWarning,
159 stacklevel=2)
160
161 self.data = persist(d)
162 logger.debug(1, "Using '%s' as the persistent data cache",
163 self.data.filename)
164
165 def addDomain(self, domain):
166 """
167 Add a domain (pending deprecation)
168 """
169 return self.data[domain]
170
171 def delDomain(self, domain):
172 """
173 Removes a domain and all the data it contains
174 """
175 del self.data[domain]
176
177 def getKeyValues(self, domain):
178 """
179 Return a list of key + value pairs for a domain
180 """
181 return self.data[domain].items()
182
183 def getValue(self, domain, key):
184 """
185 Return the value of a key for a domain
186 """
187 return self.data[domain][key]
188
189 def setValue(self, domain, key, value):
190 """
191 Sets the value of a key for a domain
192 """
193 self.data[domain][key] = value
194
195 def delValue(self, domain, key):
196 """
197 Deletes a key/value pair
198 """
199 del self.data[domain][key]
200
201def connect(database):
202 return sqlite3.connect(database, timeout=5, isolation_level=None)
203
204def persist(domain, d):
205 """Convenience factory for SQLTable objects based upon metadata"""
206 import bb.utils
207 cachedir = (d.getVar("PERSISTENT_DIR", True) or
208 d.getVar("CACHE", True))
209 if not cachedir:
210 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
211 sys.exit(1)
212
213 bb.utils.mkdirhier(cachedir)
214 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
215 return SQLTable(cachefile, domain)
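
A brief usage sketch for the persist() factory above, assuming a BitBake datastore d with PERSISTENT_DIR or CACHE already set; the domain name and keys are invented for illustration.

    import bb.persist_data

    table = bb.persist_data.persist("ExampleDomain", d)   # returns an SQLTable
    table["last-revision"] = "abc123"                      # keys and values must be strings
    if "last-revision" in table:
        rev = table["last-revision"]
    revs = table.get_by_pattern("last-%")                  # SQL LIKE pattern, returns values
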
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
new file mode 100644
index 0000000000..8b1aea9a10
--- /dev/null
+++ b/bitbake/lib/bb/process.py
@@ -0,0 +1,133 @@
1import logging
2import signal
3import subprocess
4import errno
5import select
6
7logger = logging.getLogger('BitBake.Process')
8
9def subprocess_setup():
10 # Python installs a SIGPIPE handler by default. This is usually not what
11 # non-Python subprocesses expect.
12 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
13
14class CmdError(RuntimeError):
15 def __init__(self, command, msg=None):
16 self.command = command
17 self.msg = msg
18
19 def __str__(self):
20 if not isinstance(self.command, basestring):
21 cmd = subprocess.list2cmdline(self.command)
22 else:
23 cmd = self.command
24
25 msg = "Execution of '%s' failed" % cmd
26 if self.msg:
27 msg += ': %s' % self.msg
28 return msg
29
30class NotFoundError(CmdError):
31 def __str__(self):
32 return CmdError.__str__(self) + ": command not found"
33
34class ExecutionError(CmdError):
35 def __init__(self, command, exitcode, stdout = None, stderr = None):
36 CmdError.__init__(self, command)
37 self.exitcode = exitcode
38 self.stdout = stdout
39 self.stderr = stderr
40
41 def __str__(self):
42 message = ""
43 if self.stderr:
44 message += self.stderr
45 if self.stdout:
46 message += self.stdout
47 if message:
48 message = ":\n" + message
49 return (CmdError.__str__(self) +
50 " with exit code %s" % self.exitcode + message)
51
52class Popen(subprocess.Popen):
53 defaults = {
54 "close_fds": True,
55 "preexec_fn": subprocess_setup,
56 "stdout": subprocess.PIPE,
57 "stderr": subprocess.STDOUT,
58 "stdin": subprocess.PIPE,
59 "shell": False,
60 }
61
62 def __init__(self, *args, **kwargs):
63 options = dict(self.defaults)
64 options.update(kwargs)
65 subprocess.Popen.__init__(self, *args, **options)
66
67def _logged_communicate(pipe, log, input):
68 if pipe.stdin:
69 if input is not None:
70 pipe.stdin.write(input)
71 pipe.stdin.close()
72
73 outdata, errdata = [], []
74 rin = []
75
76 if pipe.stdout is not None:
77 bb.utils.nonblockingfd(pipe.stdout.fileno())
78 rin.append(pipe.stdout)
79 if pipe.stderr is not None:
80 bb.utils.nonblockingfd(pipe.stderr.fileno())
81 rin.append(pipe.stderr)
82
83 try:
84 while pipe.poll() is None:
85 rlist = rin
86 try:
87 r,w,e = select.select (rlist, [], [], 1)
88 except OSError as e:
89 if e.errno != errno.EINTR:
90 raise
91
92 if pipe.stdout in r:
93 data = pipe.stdout.read()
94 if data is not None:
95 outdata.append(data)
96 log.write(data)
97
98 if pipe.stderr in r:
99 data = pipe.stderr.read()
100 if data is not None:
101 errdata.append(data)
102 log.write(data)
103 finally:
104 log.flush()
105 if pipe.stdout is not None:
106 pipe.stdout.close()
107 if pipe.stderr is not None:
108 pipe.stderr.close()
109 return ''.join(outdata), ''.join(errdata)
110
111def run(cmd, input=None, log=None, **options):
112 """Convenience function to run a command and return its output, raising an
113 exception when the command fails"""
114
115 if isinstance(cmd, basestring) and not "shell" in options:
116 options["shell"] = True
117
118 try:
119 pipe = Popen(cmd, **options)
120 except OSError as exc:
121 if exc.errno == 2:
122 raise NotFoundError(cmd)
123 else:
124 raise CmdError(cmd, exc)
125
126 if log:
127 stdout, stderr = _logged_communicate(pipe, log, input)
128 else:
129 stdout, stderr = pipe.communicate(input)
130
131 if pipe.returncode != 0:
132 raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
133 return stdout, stderr
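
A short usage sketch for run() above; the command is invented and the module is assumed to be importable as bb.process.

    import bb.process

    try:
        stdout, stderr = bb.process.run("echo hello")   # string commands are run with shell=True
    except bb.process.NotFoundError:
        stdout = None                                   # executable not found (OSError errno 2)
    except bb.process.ExecutionError as exc:
        # Non-zero exit: the captured output is available on the exception.
        stdout, stderr = exc.stdout, exc.stderr

    # Passing log=<file-like object> tees the output into the log while it is captured.
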
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
new file mode 100644
index 0000000000..637e1fab96
--- /dev/null
+++ b/bitbake/lib/bb/providers.py
@@ -0,0 +1,381 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2003, 2004 Chris Larson
5# Copyright (C) 2003, 2004 Phil Blundell
6# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
7# Copyright (C) 2005 Holger Hans Peter Freyther
8# Copyright (C) 2005 ROAD GmbH
9# Copyright (C) 2006 Richard Purdie
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import re
25import logging
26from bb import data, utils
27from collections import defaultdict
28import bb
29
30logger = logging.getLogger("BitBake.Provider")
31
32class NoProvider(bb.BBHandledException):
33 """Exception raised when no provider of a build dependency can be found"""
34
35class NoRProvider(bb.BBHandledException):
36 """Exception raised when no provider of a runtime dependency can be found"""
37
38class MultipleRProvider(bb.BBHandledException):
39 """Exception raised when multiple providers of a runtime dependency can be found"""
40
41def findProviders(cfgData, dataCache, pkg_pn = None):
42 """
43 Convenience function to get latest and preferred providers in pkg_pn
44 """
45
46 if not pkg_pn:
47 pkg_pn = dataCache.pkg_pn
48
49 # Need to ensure data store is expanded
50 localdata = data.createCopy(cfgData)
51 bb.data.update_data(localdata)
52 bb.data.expandKeys(localdata)
53
54 preferred_versions = {}
55 latest_versions = {}
56
57 for pn in pkg_pn:
58 (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
59 preferred_versions[pn] = (pref_ver, pref_file)
60 latest_versions[pn] = (last_ver, last_file)
61
62 return (latest_versions, preferred_versions)
63
64
65def allProviders(dataCache):
66 """
67 Find all providers for each pn
68 """
69 all_providers = defaultdict(list)
70 for (fn, pn) in dataCache.pkg_fn.items():
71 ver = dataCache.pkg_pepvpr[fn]
72 all_providers[pn].append((ver, fn))
73 return all_providers
74
75
76def sortPriorities(pn, dataCache, pkg_pn = None):
77 """
78 Reorder pkg_pn by file priority and default preference
79 """
80
81 if not pkg_pn:
82 pkg_pn = dataCache.pkg_pn
83
84 files = pkg_pn[pn]
85 priorities = {}
86 for f in files:
87 priority = dataCache.bbfile_priority[f]
88 preference = dataCache.pkg_dp[f]
89 if priority not in priorities:
90 priorities[priority] = {}
91 if preference not in priorities[priority]:
92 priorities[priority][preference] = []
93 priorities[priority][preference].append(f)
94 tmp_pn = []
95 for pri in sorted(priorities):
96 tmp_pref = []
97 for pref in sorted(priorities[pri]):
98 tmp_pref.extend(priorities[pri][pref])
99 tmp_pn = [tmp_pref] + tmp_pn
100
101 return tmp_pn
102
103def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
104 """
105 Check if the version pe,pv,pr is the preferred one.
106    If a preferred version is defined and it ends with '%', then pv has to start with that version (with the '%' removed)
107 """
108 if (pr == preferred_r or preferred_r == None):
109 if (pe == preferred_e or preferred_e == None):
110 if preferred_v == pv:
111 return True
112 if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
113 return True
114 return False
115
116def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
117 """
118 Find the first provider in pkg_pn with a PREFERRED_VERSION set.
119 """
120
121 preferred_file = None
122 preferred_ver = None
123
124 localdata = data.createCopy(cfgData)
125 localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
126 bb.data.update_data(localdata)
127
128 preferred_v = localdata.getVar('PREFERRED_VERSION', True)
129 if preferred_v:
130 m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
131 if m:
132 if m.group(1):
133 preferred_e = m.group(1)[:-1]
134 else:
135 preferred_e = None
136 preferred_v = m.group(2)
137 if m.group(3):
138 preferred_r = m.group(3)[1:]
139 else:
140 preferred_r = None
141 else:
142 preferred_e = None
143 preferred_r = None
144
145 for file_set in pkg_pn:
146 for f in file_set:
147 pe, pv, pr = dataCache.pkg_pepvpr[f]
148 if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
149 preferred_file = f
150 preferred_ver = (pe, pv, pr)
151 break
152 if preferred_file:
153 break;
154 if preferred_r:
155 pv_str = '%s-%s' % (preferred_v, preferred_r)
156 else:
157 pv_str = preferred_v
158 if not (preferred_e is None):
159 pv_str = '%s:%s' % (preferred_e, pv_str)
160 itemstr = ""
161 if item:
162 itemstr = " (for item %s)" % item
163 if preferred_file is None:
164 logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
165 available_vers = []
166 for file_set in pkg_pn:
167 for f in file_set:
168 pe, pv, pr = dataCache.pkg_pepvpr[f]
169 ver_str = pv
170 if pe:
171 ver_str = "%s:%s" % (pe, ver_str)
172 if not ver_str in available_vers:
173 available_vers.append(ver_str)
174 if available_vers:
175 available_vers.sort()
176 logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
177 else:
178 logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
179
180 return (preferred_ver, preferred_file)
181
182
183def findLatestProvider(pn, cfgData, dataCache, file_set):
184 """
185 Return the highest version of the providers in file_set.
186 Take default preferences into account.
187 """
188 latest = None
189 latest_p = 0
190 latest_f = None
191 for file_name in file_set:
192 pe, pv, pr = dataCache.pkg_pepvpr[file_name]
193 dp = dataCache.pkg_dp[file_name]
194
195 if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
196 latest = (pe, pv, pr)
197 latest_f = file_name
198 latest_p = dp
199
200 return (latest, latest_f)
201
202
203def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
204 """
205 If there is a PREFERRED_VERSION, find the highest-priority bbfile
206 providing that version. If not, find the latest version provided by
207    a bbfile in the highest-priority set.
208 """
209
210 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
211 # Find the highest priority provider with a PREFERRED_VERSION set
212 (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
213 # Find the latest version of the highest priority provider
214 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
215
216 if preferred_file is None:
217 preferred_file = latest_f
218 preferred_ver = latest
219
220 return (latest, latest_f, preferred_ver, preferred_file)
221
222
223def _filterProviders(providers, item, cfgData, dataCache):
224 """
225 Take a list of providers and filter/reorder according to the
226 environment variables and previous build results
227 """
228 eligible = []
229 preferred_versions = {}
230 sortpkg_pn = {}
231
232 # The order of providers depends on the order of the files on the disk
233 # up to here. Sort pkg_pn to make dependency issues reproducible rather
234 # than effectively random.
235 providers.sort()
236
237 # Collate providers by PN
238 pkg_pn = {}
239 for p in providers:
240 pn = dataCache.pkg_fn[p]
241 if pn not in pkg_pn:
242 pkg_pn[pn] = []
243 pkg_pn[pn].append(p)
244
245 logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
246
247 # First add PREFERRED_VERSIONS
248 for pn in pkg_pn:
249 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
250 preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
251 if preferred_versions[pn][1]:
252 eligible.append(preferred_versions[pn][1])
253
254 # Now add latest versions
255 for pn in sortpkg_pn:
256 if pn in preferred_versions and preferred_versions[pn][1]:
257 continue
258 preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
259 eligible.append(preferred_versions[pn][1])
260
261 if len(eligible) == 0:
262 logger.error("no eligible providers for %s", item)
263 return 0
264
265 # If pn == item, give it a slight default preference
266 # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
267 for p in providers:
268 pn = dataCache.pkg_fn[p]
269 if pn != item:
270 continue
271 (newvers, fn) = preferred_versions[pn]
272 if not fn in eligible:
273 continue
274 eligible.remove(fn)
275 eligible = [fn] + eligible
276
277 return eligible
278
279
280def filterProviders(providers, item, cfgData, dataCache):
281 """
282 Take a list of providers and filter/reorder according to the
283 environment variables and previous build results
284 Takes a "normal" target item
285 """
286
287 eligible = _filterProviders(providers, item, cfgData, dataCache)
288
289 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True)
290 if prefervar:
291 dataCache.preferred[item] = prefervar
292
293 foundUnique = False
294 if item in dataCache.preferred:
295 for p in eligible:
296 pn = dataCache.pkg_fn[p]
297 if dataCache.preferred[item] == pn:
298 logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
299 eligible.remove(p)
300 eligible = [p] + eligible
301 foundUnique = True
302 break
303
304 logger.debug(1, "sorted providers for %s are: %s", item, eligible)
305
306 return eligible, foundUnique
307
308def filterProvidersRunTime(providers, item, cfgData, dataCache):
309 """
310 Take a list of providers and filter/reorder according to the
311 environment variables and previous build results
312 Takes a "runtime" target item
313 """
314
315 eligible = _filterProviders(providers, item, cfgData, dataCache)
316
317 # Should use dataCache.preferred here?
318 preferred = []
319 preferred_vars = []
320 pns = {}
321 for p in eligible:
322 pns[dataCache.pkg_fn[p]] = p
323 for p in eligible:
324 pn = dataCache.pkg_fn[p]
325 provides = dataCache.pn_provides[pn]
326 for provide in provides:
327 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
328 #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
329 if prefervar in pns and pns[prefervar] not in preferred:
330 var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
331 logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
332 preferred_vars.append(var)
333 pref = pns[prefervar]
334 eligible.remove(pref)
335 eligible = [pref] + eligible
336 preferred.append(pref)
337 break
338
339 numberPreferred = len(preferred)
340
341 if numberPreferred > 1:
342 logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)
343
344 logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
345
346 return eligible, numberPreferred
347
348regexp_cache = {}
349
350def getRuntimeProviders(dataCache, rdepend):
351 """
352 Return any providers of runtime dependency
353 """
354 rproviders = []
355
356 if rdepend in dataCache.rproviders:
357 rproviders += dataCache.rproviders[rdepend]
358
359 if rdepend in dataCache.packages:
360 rproviders += dataCache.packages[rdepend]
361
362 if rproviders:
363 return rproviders
364
365 # Only search dynamic packages if we can't find anything in other variables
366 for pattern in dataCache.packages_dynamic:
367 pattern = pattern.replace('+', "\+")
368 if pattern in regexp_cache:
369 regexp = regexp_cache[pattern]
370 else:
371 try:
372 regexp = re.compile(pattern)
373 except:
374 logger.error("Error parsing regular expression '%s'", pattern)
375 raise
376 regexp_cache[pattern] = regexp
377 if regexp.match(rdepend):
378 rproviders += dataCache.packages_dynamic[pattern]
379 logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)
380
381 return rproviders
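
A worked illustration of the '%' handling in preferredVersionMatch above; the version strings are invented.

    from bb.providers import preferredVersionMatch

    # PREFERRED_VERSION = "1.2.%" is parsed by findPreferredProvider into
    # preferred_e=None, preferred_v="1.2.%", preferred_r=None, so any 1.2.x matches:
    assert preferredVersionMatch(None, "1.2.7", "r0", None, "1.2.%", None)
    assert not preferredVersionMatch(None, "1.3.0", "r0", None, "1.2.%", None)
    # When a preferred PR is given it must match exactly:
    assert not preferredVersionMatch(None, "1.2.7", "r0", None, "1.2.7", "r1")
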
diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/__init__.py
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
new file mode 100644
index 0000000000..b748e4a4f2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/builtin.py
@@ -0,0 +1,710 @@
1# builtin.py - builtins and utilities definitions for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Builtin and internal utilities implementations.
9
10- Beware not to use the Python interpreter environment as if it were the shell
11environment. For instance, a command's working directory must be explicitly handled
12through env['PWD'] instead of relying on the Python working directory.
13"""
14import errno
15import optparse
16import os
17import re
18import subprocess
19import sys
20import time
21
22def has_subprocess_bug():
23 return getattr(subprocess, 'list2cmdline') and \
24 ( subprocess.list2cmdline(['']) == '' or \
25 subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
26
27# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
28# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
29# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
30# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
31if has_subprocess_bug():
32 import subprocess_fix
33 subprocess.list2cmdline = subprocess_fix.list2cmdline
34
35from sherrors import *
36
37class NonExitingParser(optparse.OptionParser):
38 """OptionParser default behaviour upon error is to print the error message and
39 exit. Raise a utility error instead.
40 """
41 def error(self, msg):
42 raise UtilityError(msg)
43
44#-------------------------------------------------------------------------------
45# set special builtin
46#-------------------------------------------------------------------------------
47OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
48OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
49 help='The shell shall disable pathname expansion.')
50OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
51 help="""When this option is on, if a simple command fails for any of the \
52 reasons listed in Consequences of Shell Errors or returns an exit status \
53 value >0, and is not part of the compound list following a while, until, \
54 or if keyword, and is not a part of an AND or OR list, and is not a \
55 pipeline preceded by the ! reserved word, then the shell shall immediately \
56 exit.""")
57OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
58 help="""The shell shall write to standard error a trace for each command \
59 after it expands the command and before it executes it. It is unspecified \
60 whether the command that turns tracing off is traced.""")
61
62def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
63 if 'debug-utility' in debugflags:
64 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
65
66 option, args = OPT_SET.parse_args(args)
67 env = interp.get_env()
68
69 if option.has_f:
70 env.set_opt('-f')
71 if option.has_e:
72 env.set_opt('-e')
73 if option.has_x:
74 env.set_opt('-x')
75 return 0
76
77#-------------------------------------------------------------------------------
78# shift special builtin
79#-------------------------------------------------------------------------------
80def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
81 if 'debug-utility' in debugflags:
82 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
83
84 params = interp.get_env().get_positional_args()
85 if args:
86 try:
87 n = int(args[0])
88 if n > len(params):
89 raise ValueError()
90 except ValueError:
91 return 1
92 else:
93 n = 1
94
95 params[:n] = []
96 interp.get_env().set_positional_args(params)
97 return 0
98
99#-------------------------------------------------------------------------------
100# export special builtin
101#-------------------------------------------------------------------------------
102OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
103OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
104
105def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
106 if 'debug-utility' in debugflags:
107 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
108
109 option, args = OPT_EXPORT.parse_args(args)
110 if option.has_p:
111 raise NotImplementedError()
112
113 for arg in args:
114 try:
115 name, value = arg.split('=', 1)
116 except ValueError:
117 name, value = arg, None
118 env = interp.get_env().export(name, value)
119
120 return 0
121
122#-------------------------------------------------------------------------------
123# return special builtin
124#-------------------------------------------------------------------------------
125def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
126 if 'debug-utility' in debugflags:
127 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
128 res = 0
129 if args:
130 try:
131 res = int(args[0])
132 except ValueError:
133 res = 0
134 if not 0<=res<=255:
135 res = 0
136
137 # BUG: should be last executed command exit code
138 raise ReturnSignal(res)
139
140#-------------------------------------------------------------------------------
141# trap special builtin
142#-------------------------------------------------------------------------------
143def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
144 if 'debug-utility' in debugflags:
145 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
146 if len(args) < 2:
147 stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
148 return 2
149
150 action = args[0]
151 for sig in args[1:]:
152 try:
153 env.traps[sig] = action
154 except Exception as e:
155 stderr.write('trap: %s\n' % str(e))
156 return 0
157
158#-------------------------------------------------------------------------------
159# unset special builtin
160#-------------------------------------------------------------------------------
161OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
162OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
163OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
164
165def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
166 if 'debug-utility' in debugflags:
167 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
168
169 option, args = OPT_UNSET.parse_args(args)
170
171 status = 0
172 env = interp.get_env()
173 for arg in args:
174 try:
175 if option.has_f:
176 env.remove_function(arg)
177 else:
178 del env[arg]
179 except KeyError:
180 pass
181 except VarAssignmentError:
182 status = 1
183
184 return status
185
186#-------------------------------------------------------------------------------
187# wait special builtin
188#-------------------------------------------------------------------------------
189def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
190 if 'debug-utility' in debugflags:
191 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
192
193 return interp.wait([int(arg) for arg in args])
194
195#-------------------------------------------------------------------------------
196# cat utility
197#-------------------------------------------------------------------------------
198def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
199 if 'debug-utility' in debugflags:
200 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
201
202 if not args:
203 args = ['-']
204
205 status = 0
206 for arg in args:
207 if arg == '-':
208 data = stdin.read()
209 else:
210 path = os.path.join(env['PWD'], arg)
211 try:
212 f = file(path, 'rb')
213 try:
214 data = f.read()
215 finally:
216 f.close()
217 except IOError as e:
218 if e.errno != errno.ENOENT:
219 raise
220 status = 1
221 continue
222 stdout.write(data)
223 stdout.flush()
224 return status
225
226#-------------------------------------------------------------------------------
227# cd utility
228#-------------------------------------------------------------------------------
229OPT_CD = NonExitingParser("cd - change the working directory")
230
231def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
232 if 'debug-utility' in debugflags:
233 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
234
235 option, args = OPT_CD.parse_args(args)
236 env = interp.get_env()
237
238 directory = None
239 printdir = False
240 if not args:
241 home = env.get('HOME')
242 if home:
243 # Unspecified, do nothing
244 return 0
245 else:
246 directory = home
247 elif len(args)==1:
248 directory = args[0]
249 if directory=='-':
250 if 'OLDPWD' not in env:
251 raise UtilityError("OLDPWD not set")
252 printdir = True
253 directory = env['OLDPWD']
254 else:
255 raise UtilityError("too many arguments")
256
257 curpath = None
258 # Absolute directories will be handled correctly by the os.path.join call.
259 if not directory.startswith('.') and not directory.startswith('..'):
260 cdpaths = env.get('CDPATH', '.').split(';')
261 for cdpath in cdpaths:
262 p = os.path.join(cdpath, directory)
263 if os.path.isdir(p):
264 curpath = p
265 break
266
267 if curpath is None:
268 curpath = directory
269 curpath = os.path.join(env['PWD'], directory)
270
271 env['OLDPWD'] = env['PWD']
272 env['PWD'] = curpath
273 if printdir:
274 stdout.write('%s\n' % curpath)
275 return 0
276
277#-------------------------------------------------------------------------------
278# colon utility
279#-------------------------------------------------------------------------------
280def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
281 if 'debug-utility' in debugflags:
282 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
283 return 0
284
285#-------------------------------------------------------------------------------
286# echo utility
287#-------------------------------------------------------------------------------
288def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
289 if 'debug-utility' in debugflags:
290 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
291
292 # Echo only takes arguments, no options. Use printf if you need fancy stuff.
293 output = ' '.join(args) + '\n'
294 stdout.write(output)
295 stdout.flush()
296 return 0
297
298#-------------------------------------------------------------------------------
299# egrep utility
300#-------------------------------------------------------------------------------
301# egrep is usually a shell script.
302# Unfortunately, pysh does not support shell scripts *with arguments* right now,
303# so the redirection is implemented here, assuming grep is available.
304def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
305 if 'debug-utility' in debugflags:
306 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
307
308 return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
309 stderr, debugflags)
310
311#-------------------------------------------------------------------------------
312# env utility
313#-------------------------------------------------------------------------------
314def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
315 if 'debug-utility' in debugflags:
316 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
317
318 if args and args[0]=='-i':
319 raise NotImplementedError('env: -i option is not implemented')
320
321 i = 0
322 for arg in args:
323 if '=' not in arg:
324 break
325 # Update the current environment
326 name, value = arg.split('=', 1)
327 env[name] = value
328 i += 1
329
330 if args[i:]:
331 # Find and then execute the specified command
332 utility = env.find_in_path(args[i])
333 if not utility:
334 return 127
335 args[i:i+1] = utility
336 name = args[i]
337 args = args[i+1:]
338 try:
339 return run_command(name, args, interp, env, stdin, stdout, stderr,
340 debugflags)
341 except UtilityError:
342 stderr.write('env: failed to execute %s' % ' '.join([name]+args))
343 return 126
344 else:
345 for pair in env.get_variables().iteritems():
346 stdout.write('%s=%s\n' % pair)
347 return 0
348
349#-------------------------------------------------------------------------------
350# exit utility
351#-------------------------------------------------------------------------------
352def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
353 if 'debug-utility' in debugflags:
354 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
355
356 res = None
357 if args:
358 try:
359 res = int(args[0])
360 except ValueError:
361 res = None
362 if not 0<=res<=255:
363 res = None
364
365 if res is None:
366 # BUG: should be last executed command exit code
367 res = 0
368
369 raise ExitSignal(res)
370
371#-------------------------------------------------------------------------------
372# fgrep utility
373#-------------------------------------------------------------------------------
374# see egrep
375def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
376 if 'debug-utility' in debugflags:
377 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
378
379 return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
380 stderr, debugflags)
381
382#-------------------------------------------------------------------------------
383# gunzip utility
384#-------------------------------------------------------------------------------
385# see egrep
386def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
387 if 'debug-utility' in debugflags:
388 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
389
390 return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
391 stderr, debugflags)
392
393#-------------------------------------------------------------------------------
394# kill utility
395#-------------------------------------------------------------------------------
396def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
397 if 'debug-utility' in debugflags:
398 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
399
400 for arg in args:
401 pid = int(arg)
402 status = subprocess.call(['pskill', '/T', str(pid)],
403 shell=True,
404 stdout=subprocess.PIPE,
405 stderr=subprocess.PIPE)
406 # pskill is asynchronous, hence the stupid polling loop
407 while 1:
408 p = subprocess.Popen(['pslist', str(pid)],
409 shell=True,
410 stdout=subprocess.PIPE,
411 stderr=subprocess.STDOUT)
412 output = p.communicate()[0]
413 if ('process %d was not' % pid) in output:
414 break
415 time.sleep(1)
416 return status
417
418#-------------------------------------------------------------------------------
419# mkdir utility
420#-------------------------------------------------------------------------------
421OPT_MKDIR = NonExitingParser("mkdir - make directories.")
422OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
423
424def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
425 if 'debug-utility' in debugflags:
426 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
427
428 # TODO: implement umask
429 # TODO: implement proper utility error report
430 option, args = OPT_MKDIR.parse_args(args)
431 for arg in args:
432 path = os.path.join(env['PWD'], arg)
433 if option.has_p:
434 try:
435 os.makedirs(path)
436 except OSError as e:
437 if e.errno != errno.EEXIST:
438 raise
439 else:
440 os.mkdir(path)
441 return 0
442
443#-------------------------------------------------------------------------------
444# netstat utility
445#-------------------------------------------------------------------------------
446def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
447 # Do you really expect me to implement netstat ?
448 # This empty form is enough for Mercurial tests since it's
449 # supposed to generate nothing upon success. Faking this test
450 # is not a big deal either.
451 if 'debug-utility' in debugflags:
452 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
453 return 0
454
455#-------------------------------------------------------------------------------
456# pwd utility
457#-------------------------------------------------------------------------------
458OPT_PWD = NonExitingParser("pwd - return working directory name")
459OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
460 help="""If the PWD environment variable contains an absolute pathname of \
461 the current directory that does not contain the filenames dot or dot-dot, \
462 pwd shall write this pathname to standard output. Otherwise, the -L option \
463 shall behave as the -P option.""")
464OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False,
465 help="""The absolute pathname written shall not contain filenames that, in \
466 the context of the pathname, refer to files of type symbolic link.""")
467
468def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
469 if 'debug-utility' in debugflags:
470 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
471
472 option, args = OPT_PWD.parse_args(args)
473 stdout.write('%s\n' % env['PWD'])
474 return 0
475
476#-------------------------------------------------------------------------------
477# printf utility
478#-------------------------------------------------------------------------------
479RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
480
481def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
482 if 'debug-utility' in debugflags:
483 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
484
485 def replace(m):
486 assert m.group()
487 g = m.group()[1:]
488 if g.startswith('x'):
489 return chr(int(g[1:], 16))
490 if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
491 # Yay, an octal number
492 return chr(int(g, 8))
493 return {
494 'a': '\a',
495 'b': '\b',
496 'f': '\f',
497 'n': '\n',
498 'r': '\r',
499 't': '\t',
500 'v': '\v',
501 '\\': '\\',
502 }.get(g)
503
504 # Convert escape sequences
505 format = re.sub(RE_UNESCAPE, replace, args[0])
506 stdout.write(format % tuple(args[1:]))
507 return 0
508
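A note on the escape handling above: the RE_UNESCAPE pass rewrites backslash sequences before the format string is applied to the remaining arguments. A minimal self-contained sketch of the same conversion (the unescape() helper name is introduced here for illustration only):

import re

RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')

def unescape(fmt):
    # Hex escape, octal escape, then the named single-character escapes.
    def replace(m):
        g = m.group()[1:]
        if g.startswith('x'):
            return chr(int(g[1:], 16))
        if len(g) <= 3 and all(c in '01234567' for c in g):
            return chr(int(g, 8))
        return {'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n',
                'r': '\r', 't': '\t', 'v': '\v', '\\': '\\'}.get(g)
    return re.sub(RE_UNESCAPE, replace, fmt)

# printf '%s\t%s\n' a b  ->  "a\tb\n"
print(repr(unescape('%s\\t%s\\n') % ('a', 'b')))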
509#-------------------------------------------------------------------------------
510# true utility
511#-------------------------------------------------------------------------------
512def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
513 if 'debug-utility' in debugflags:
514 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
515 return 0
516
517#-------------------------------------------------------------------------------
518# sed utility
519#-------------------------------------------------------------------------------
520RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
521
522# cygwin sed fails with some expressions when they do not end with a single space.
523# See unit tests for details. Interestingly, the same expressions work perfectly
524# in the cygwin shell.
525def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
526 if 'debug-utility' in debugflags:
527 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
528
529 # Scan pattern arguments and append a space if necessary
530 for i in xrange(len(args)):
531 if not RE_SED.search(args[i]):
532 continue
533 args[i] = args[i] + ' '
534
535 return run_command(name, args, interp, env, stdin, stdout,
536 stderr, debugflags)
537
538#-------------------------------------------------------------------------------
539# sleep utility
540#-------------------------------------------------------------------------------
541def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
542 if 'debug-utility' in debugflags:
543 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
544 time.sleep(int(args[0]))
545 return 0
546
547#-------------------------------------------------------------------------------
548# sort utility
549#-------------------------------------------------------------------------------
550OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
551
552def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
553
554 def sort(path):
555 if path == '-':
556 lines = stdin.readlines()
557 else:
558 try:
559 f = file(path)
560 try:
561 lines = f.readlines()
562 finally:
563 f.close()
564 except IOError as e:
565 stderr.write(str(e) + '\n')
566 return 1
567
568 if lines and lines[-1][-1]!='\n':
569 lines[-1] = lines[-1] + '\n'
570 return lines
571
572 if 'debug-utility' in debugflags:
573 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
574
575 option, args = OPT_SORT.parse_args(args)
576 alllines = []
577
578 if len(args)<=0:
579 args += ['-']
580
581 # Load all files lines
582 curdir = os.getcwd()
583 try:
584 os.chdir(env['PWD'])
585 for path in args:
586 alllines += sort(path)
587 finally:
588 os.chdir(curdir)
589
590 alllines.sort()
591 for line in alllines:
592 stdout.write(line)
593 return 0
594
595#-------------------------------------------------------------------------------
596# hg utility
597#-------------------------------------------------------------------------------
598
599hgcommands = [
600 'add',
601 'addremove',
602 'commit', 'ci',
603 'debugrename',
604 'debugwalk',
605 'falabala', # Dummy command used in a mercurial test
606 'incoming',
607 'locate',
608 'pull',
609 'push',
610 'qinit',
611 'remove', 'rm',
612 'rename', 'mv',
613 'revert',
614 'showconfig',
615 'status', 'st',
616 'strip',
617 ]
618
619def rewriteslashes(name, args):
620 # Several hg commands output file paths, rewrite the separators
621 if len(args) > 1 and name.lower().endswith('python') \
622 and args[0].endswith('hg'):
623 for cmd in hgcommands:
624 if cmd in args[1:]:
625 return True
626
627 # svn output contains many paths with OS specific separators.
628 # Normalize these to unix paths.
629 base = os.path.basename(name)
630 if base.startswith('svn'):
631 return True
632
633 return False
634
635def rewritehg(output):
636 if not output:
637 return output
638 # Rewrite os specific messages
639 output = output.replace(': The system cannot find the file specified',
640 ': No such file or directory')
641 output = re.sub(': Access is denied.*$', ': Permission denied', output)
642 output = output.replace(': No connection could be made because the target machine actively refused it',
643 ': Connection refused')
644 return output
645
646
647def run_command(name, args, interp, env, stdin, stdout,
648 stderr, debugflags):
649 # Execute the command
650 if 'debug-utility' in debugflags:
651 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
652
653 hgbin = interp.options().hgbinary
654 ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
655 unixoutput = 'cygwin' in name or ishg
656
657 exec_env = env.get_variables()
658 try:
659 # BUG: comparing file descriptors is clearly not a reliable way to tell
660 # whether they point to the same underlying object. But within pysh's limited
661 # scope this is usually right; we do not expect complicated redirections
662 # besides the usual 2>&1.
663 # There is still one case we cannot deal with: when stdout
664 # and stderr are redirected *by the pysh caller*. This is the reason for the
665 # --redirect pysh() option.
666 # Now, we want to know whether they are the same because we sometimes need to
667 # transform the command output, mostly removing CR-LF to ensure that
668 # command output is unix-like. Cygwin utilities are a special case because
669 # they explicitly set their output streams to binary mode, so we have
670 # nothing to do. For all other commands, we have to guess whether they
671 # are sending text data, in which case the transformation must be done.
672 # Again, the NUL character test is unreliable but should be enough for
673 # hg tests.
674 redirected = stdout.fileno()==stderr.fileno()
675 if not redirected:
676 p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
677 stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
678 else:
679 p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
680 stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
681 out, err = p.communicate()
682 except WindowsError as e:
683 raise UtilityError(str(e))
684
685 if not unixoutput:
686 def encode(s):
687 if '\0' in s:
688 return s
689 return s.replace('\r\n', '\n')
690 else:
691 encode = lambda s: s
692
693 if rewriteslashes(name, args):
694 encode1_ = encode
695 def encode(s):
696 s = encode1_(s)
697 s = s.replace('\\\\', '\\')
698 s = s.replace('\\', '/')
699 return s
700
701 if ishg:
702 encode2_ = encode
703 def encode(s):
704 return rewritehg(encode2_(s))
705
706 stdout.write(encode(out))
707 if not redirected:
708 stderr.write(encode(err))
709 return p.returncode
710
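The nested encode() closures above compose up to three rewrites. The decision chain they implement can be summarized in a standalone sketch (normalize() is an illustrative stand-in, not a function of this module): binary-looking output is passed through, CR-LF is folded for text output, and backslash separators are rewritten for hg/svn-style commands.

def normalize(data, rewrite_slashes=False):
    # A NUL byte suggests binary output: leave it untouched.
    if '\0' in data:
        return data
    # Text output from native win32 tools: fold CR-LF into LF.
    data = data.replace('\r\n', '\n')
    if rewrite_slashes:
        # hg/svn print OS-specific separators; normalize to unix paths.
        data = data.replace('\\\\', '\\')
        data = data.replace('\\', '/')
    return data

print(repr(normalize('a\r\nb\r\n')))                  # 'a\nb\n'
print(repr(normalize('dir\\sub\\file\r\n', True)))    # 'dir/sub/file\n'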
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
new file mode 100644
index 0000000000..25d8c92ec4
--- /dev/null
+++ b/bitbake/lib/bb/pysh/interp.py
@@ -0,0 +1,1367 @@
1# interp.py - shell interpreter for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Implement the shell interpreter.
9
10Most references are made to "The Open Group Base Specifications Issue 6".
11<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
12"""
13# TODO: document the fact that input streams must implement fileno() so Popen will work correctly.
14# It requires non-stdin streams to be implemented as files. Still to be tested...
15# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
16# TODO: stop command execution upon error.
17# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
18# TODO: review subshell implementation
19# TODO: test environment cloning for non-special builtins
20# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
21# TODO: unit test for variable assignment
22# TODO: test error management wrt error type/utility type
23# TODO: test for binary output everywhere
24# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
25import base64
26import cPickle as pickle
27import errno
28import glob
29import os
30import re
31import subprocess
32import sys
33import tempfile
34
35try:
36 s = set()
37 del s
38except NameError:
39 from Set import Set as set
40
41import builtin
42from sherrors import *
43import pyshlex
44import pyshyacc
45
46def mappend(func, *args, **kargs):
47 """Like map but assume func returns a list. Returned lists are merged into
48 a single one.
49 """
50 return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
51
52class FileWrapper:
53 """File object wrapper to ease debugging.
54
55 Allow mode checking and implement file duplication through a simple
56 reference counting scheme. Not sure the latter is really useful since
57 only real file descriptors can be used.
58 """
59 def __init__(self, mode, file, close=True):
60 if mode not in ('r', 'w', 'a'):
61 raise IOError('invalid mode: %s' % mode)
62 self._mode = mode
63 self._close = close
64 if isinstance(file, FileWrapper):
65 if file._refcount[0] <= 0:
66 raise IOError(0, 'Error')
67 self._refcount = file._refcount
68 self._refcount[0] += 1
69 self._file = file._file
70 else:
71 self._refcount = [1]
72 self._file = file
73
74 def dup(self):
75 return FileWrapper(self._mode, self, self._close)
76
77 def fileno(self):
78 """fileno() should be only necessary for input streams."""
79 return self._file.fileno()
80
81 def read(self, size=-1):
82 if self._mode!='r':
83 raise IOError(0, 'Error')
84 return self._file.read(size)
85
86 def readlines(self, *args, **kwargs):
87 return self._file.readlines(*args, **kwargs)
88
89 def write(self, s):
90 if self._mode not in ('w', 'a'):
91 raise IOError(0, 'Error')
92 return self._file.write(s)
93
94 def flush(self):
95 self._file.flush()
96
97 def close(self):
98 if not self._refcount:
99 return
100 assert self._refcount[0] > 0
101
102 self._refcount[0] -= 1
103 if self._refcount[0] == 0:
104 self._mode = 'c'
105 if self._close:
106 self._file.close()
107 self._refcount = None
108
109 def mode(self):
110 return self._mode
111
112 def __getattr__(self, name):
113 if name == 'name':
114 self.name = getattr(self._file, name)
115 return self.name
116 else:
117 raise AttributeError(name)
118
119 def __del__(self):
120 self.close()
121
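The duplication scheme above means that dup() returns a wrapper sharing the same underlying file and reference counter, and only the final close() call actually closes it. A toy stand-in class (not FileWrapper itself) makes the intended behaviour concrete:

import io

class SharedFile(object):
    # Trimmed-down illustration of the same refcounting idea.
    def __init__(self, fileobj, refcount=None):
        self._file = fileobj
        self._refcount = refcount if refcount is not None else [1]

    def dup(self):
        self._refcount[0] += 1
        return SharedFile(self._file, self._refcount)

    def close(self):
        self._refcount[0] -= 1
        if self._refcount[0] == 0:
            self._file.close()

f = SharedFile(io.BytesIO(b'data'))
g = f.dup()
f.close()
print(g._file.closed)   # False: one reference is still live
g.close()
print(g._file.closed)   # True: last close really closed the file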
122
123def win32_open_devnull(mode):
124 return open('NUL', mode)
125
126
127class Redirections:
128 """Stores open files and their mapping to pseudo-sh file descriptor.
129 """
130 # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
131 # not make 1 redirect to 4
132 def __init__(self, stdin=None, stdout=None, stderr=None):
133 self._descriptors = {}
134 if stdin is not None:
135 self._add_descriptor(0, stdin)
136 if stdout is not None:
137 self._add_descriptor(1, stdout)
138 if stderr is not None:
139 self._add_descriptor(2, stderr)
140
141 def add_here_document(self, interp, name, content, io_number=None):
142 if io_number is None:
143 io_number = 0
144
145 if name==pyshlex.unquote_wordtree(name):
146 content = interp.expand_here_document(('TOKEN', content))
147
148 # Write document content in a temporary file
149 tmp = tempfile.TemporaryFile()
150 try:
151 tmp.write(content)
152 tmp.flush()
153 tmp.seek(0)
154 self._add_descriptor(io_number, FileWrapper('r', tmp))
155 except:
156 tmp.close()
157 raise
158
159 def add(self, interp, op, filename, io_number=None):
160 if op not in ('<', '>', '>|', '>>', '>&'):
161 # TODO: add descriptor duplication and here_documents
162 raise RedirectionError('Unsupported redirection operator "%s"' % op)
163
164 if io_number is not None:
165 io_number = int(io_number)
166
167 if (op == '>&' and filename.isdigit()) or filename=='-':
168 # No expansion for file descriptors, quote them if you want a filename
169 fullname = filename
170 else:
171 if filename.startswith('/'):
172 # TODO: win32 kludge
173 if filename=='/dev/null':
174 fullname = 'NUL'
175 else:
176 # TODO: handle absolute pathnames, they are unlikely to exist on the
177 # current platform (win32 for instance).
178 raise NotImplementedError()
179 else:
180 fullname = interp.expand_redirection(('TOKEN', filename))
181 if not fullname:
182 raise RedirectionError('%s: ambiguous redirect' % filename)
183 # Build absolute path based on PWD
184 fullname = os.path.join(interp.get_env()['PWD'], fullname)
185
186 if op=='<':
187 return self._add_input_redirection(interp, fullname, io_number)
188 elif op in ('>', '>|'):
189 clobber = ('>|'==op)
190 return self._add_output_redirection(interp, fullname, io_number, clobber)
191 elif op=='>>':
192 return self._add_output_appending(interp, fullname, io_number)
193 elif op=='>&':
194 return self._dup_output_descriptor(fullname, io_number)
195
196 def close(self):
197 if self._descriptors is not None:
198 for desc in self._descriptors.itervalues():
199 desc.flush()
200 desc.close()
201 self._descriptors = None
202
203 def stdin(self):
204 return self._descriptors[0]
205
206 def stdout(self):
207 return self._descriptors[1]
208
209 def stderr(self):
210 return self._descriptors[2]
211
212 def clone(self):
213 clone = Redirections()
214 for desc, fileobj in self._descriptors.iteritems():
215 clone._descriptors[desc] = fileobj.dup()
216 return clone
217
218 def _add_output_redirection(self, interp, filename, io_number, clobber):
219 if io_number is None:
220 # io_number defaults to standard output
221 io_number = 1
222
223 if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
224 # File already exists in no-clobber mode, bail out
225 raise RedirectionError('File "%s" already exists' % filename)
226
227 # Open and register
228 self._add_file_descriptor(io_number, filename, 'w')
229
230 def _add_output_appending(self, interp, filename, io_number):
231 if io_number is None:
232 io_number = 1
233 self._add_file_descriptor(io_number, filename, 'a')
234
235 def _add_input_redirection(self, interp, filename, io_number):
236 if io_number is None:
237 io_number = 0
238 self._add_file_descriptor(io_number, filename, 'r')
239
240 def _add_file_descriptor(self, io_number, filename, mode):
241 try:
242 if filename.startswith('/'):
243 if filename=='/dev/null':
244 f = win32_open_devnull(mode+'b')
245 else:
246 # TODO: handle absolute pathnames, they are unlikely to exist on the
247 # current platform (win32 for instance).
248 raise NotImplementedError('cannot open absolute path %s' % repr(filename))
249 else:
250 f = file(filename, mode+'b')
251 except IOError as e:
252 raise RedirectionError(str(e))
253
254 wrapper = None
255 try:
256 wrapper = FileWrapper(mode, f)
257 f = None
258 self._add_descriptor(io_number, wrapper)
259 except:
260 if f: f.close()
261 if wrapper: wrapper.close()
262 raise
263
264 def _dup_output_descriptor(self, source_fd, dest_fd):
265 if source_fd is None:
266 source_fd = 1
267 self._dup_file_descriptor(source_fd, dest_fd, 'w')
268
269 def _dup_file_descriptor(self, source_fd, dest_fd, mode):
270 source_fd = int(source_fd)
271 if source_fd not in self._descriptors:
272 raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
273 source = self._descriptors[source_fd]
274
275 if source.mode()!=mode:
276 raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
277
278 if dest_fd=='-':
279 # Close the source descriptor
280 del self._descriptors[source_fd]
281 source.close()
282 else:
283 dest_fd = int(dest_fd)
284 if dest_fd not in self._descriptors:
285 raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
286
287 dest = self._descriptors[dest_fd]
288 if dest.mode()!=mode:
289 raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
290
291 self._descriptors[dest_fd] = source.dup()
292 dest.close()
293
294 def _add_descriptor(self, io_number, file):
295 io_number = int(io_number)
296
297 if io_number in self._descriptors:
298 # Close the current descriptor
299 d = self._descriptors[io_number]
300 del self._descriptors[io_number]
301 d.close()
302
303 self._descriptors[io_number] = file
304
305 def __str__(self):
306 names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
307 in self._descriptors.iteritems()]
308 names = ','.join(names)
309 return 'Redirections(%s)' % names
310
311 def __del__(self):
312 self.close()
313
314def cygwin_to_windows_path(path):
315 """Turn /cygdrive/c/foo into c:/foo, or return path if it
316 is not a cygwin path.
317 """
318 if not path.startswith('/cygdrive/'):
319 return path
320 path = path[len('/cygdrive/'):]
321 path = path[:1] + ':' + path[1:]
322 return path
323
324def win32_to_unix_path(path):
325 if path is not None:
326 path = path.replace('\\', '/')
327 return path
328
329_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
330_SHEBANG_CMDS = {
331 '/usr/bin/env': 'env',
332 '/bin/sh': 'pysh',
333 'python': 'python',
334}
335
336def resolve_shebang(path, ignoreshell=False):
337 """Return a list of arguments as shebang interpreter call or an empty list
338 if path does not refer to an executable script.
339 See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
340
341 ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
342 """
343 try:
344 f = file(path)
345 try:
346 # At most 80 characters in the first line
347 header = f.read(80).splitlines()[0]
348 finally:
349 f.close()
350
351 m = _RE_SHEBANG.search(header)
352 if not m:
353 return []
354 cmd, arg = m.group(1,2)
355 if os.path.isfile(cmd):
356 # Keep this one, the hg script for instance contains a weird windows
357 # shebang referencing the current python install.
358 cmdfile = os.path.basename(cmd).lower()
359 if cmdfile == 'python.exe':
360 cmd = 'python'
361 pass
362 elif cmd not in _SHEBANG_CMDS:
363 raise CommandNotFound('Unknown interpreter "%s" referenced in '\
364 'shebang' % header)
365 cmd = _SHEBANG_CMDS.get(cmd)
366 if cmd is None or (ignoreshell and cmd == 'pysh'):
367 return []
368 if arg is None:
369 return [cmd, win32_to_unix_path(path)]
370 return [cmd, arg, win32_to_unix_path(path)]
371 except IOError as e:
372 if e.errno!=errno.ENOENT and \
373 (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
374 raise
375 return []
376
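Concretely, the shebang mapping above turns the first line of a script into an interpreter argument list. The core of that translation is repeated here so the snippet stands alone (the real function also checks that the interpreter exists on disk, handles unreadable paths and the Windows python.exe case):

import re

_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
_SHEBANG_CMDS = {'/usr/bin/env': 'env', '/bin/sh': 'pysh', 'python': 'python'}

def resolve(header, path):
    m = _RE_SHEBANG.search(header)
    if not m:
        return []
    cmd, arg = m.group(1, 2)
    cmd = _SHEBANG_CMDS.get(cmd)
    if cmd is None:
        return []
    if arg is None:
        return [cmd, path]
    return [cmd, arg, path]

print(resolve('#!/bin/sh', 'run-tests.sh'))       # ['pysh', 'run-tests.sh']
print(resolve('#!/usr/bin/env python', 'hg'))     # ['env', 'python', 'hg']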
377def win32_find_in_path(name, path):
378 if isinstance(path, str):
379 path = path.split(os.pathsep)
380
381 exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
382 for p in path:
383 p_name = os.path.join(p, name)
384
385 prefix = resolve_shebang(p_name)
386 if prefix:
387 return prefix
388
389 for ext in exts:
390 p_name_ext = p_name + ext
391 if os.path.exists(p_name_ext):
392 return [win32_to_unix_path(p_name_ext)]
393 return []
394
395class Traps(dict):
396 def __setitem__(self, key, value):
397 if key not in ('EXIT',):
398 raise NotImplementedError()
399 super(Traps, self).__setitem__(key, value)
400
401# IFS white spaces character class
402_IFS_WHITESPACES = (' ', '\t', '\n')
403
404class Environment:
405 """Environment holds environment variables, export table, function
406 definitions and whatever is defined in 2.12 "Shell Execution Environment",
407 redirection excepted.
408 """
409 def __init__(self, pwd):
410 self._opt = set() #Shell options
411
412 self._functions = {}
413 self._env = {'?': '0', '#': '0'}
414 self._exported = set([
415 'HOME', 'IFS', 'PATH'
416 ])
417
418 # Set environment vars with side-effects
419 self._ifs_ws = None # Set of IFS whitespace characters
420 self._ifs_re = None # Regular expression used to split between words using IFS classes
421 self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
422 self['PWD'] = pwd
423 self.traps = Traps()
424
425 def clone(self, subshell=False):
426 env = Environment(self['PWD'])
427 env._opt = set(self._opt)
428 for k,v in self.get_variables().iteritems():
429 if k in self._exported:
430 env.export(k,v)
431 elif subshell:
432 env[k] = v
433
434 if subshell:
435 env._functions = dict(self._functions)
436
437 return env
438
439 def __getitem__(self, key):
440 if key in ('@', '*', '-', '$'):
441 raise NotImplementedError('%s is not implemented' % repr(key))
442 return self._env[key]
443
444 def get(self, key, defval=None):
445 try:
446 return self[key]
447 except KeyError:
448 return defval
449
450 def __setitem__(self, key, value):
451 if key=='IFS':
452 # Update the whitespace/non-whitespace classes
453 self._update_ifs(value)
454 elif key=='PWD':
455 pwd = os.path.abspath(value)
456 if not os.path.isdir(pwd):
457 raise VarAssignmentError('Invalid directory %s' % value)
458 value = pwd
459 elif key in ('?', '!'):
460 value = str(int(value))
461 self._env[key] = value
462
463 def __delitem__(self, key):
464 if key in ('IFS', 'PWD', '?'):
465 raise VarAssignmentError('%s cannot be unset' % key)
466 del self._env[key]
467
468 def __contains__(self, item):
469 return item in self._env
470
471 def set_positional_args(self, args):
472 """Set the content of 'args' as positional argument from 1 to len(args).
473 Return previous argument as a list of strings.
474 """
475 # Save and remove previous arguments
476 prevargs = []
477 for i in xrange(int(self._env['#'])):
478 i = str(i+1)
479 prevargs.append(self._env[i])
480 del self._env[i]
481 self._env['#'] = '0'
482
483 #Set new ones
484 for i,arg in enumerate(args):
485 self._env[str(i+1)] = str(arg)
486 self._env['#'] = str(len(args))
487
488 return prevargs
489
490 def get_positional_args(self):
491 return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
492
493 def get_variables(self):
494 return dict(self._env)
495
496 def export(self, key, value=None):
497 if value is not None:
498 self[key] = value
499 self._exported.add(key)
500
501 def get_exported(self):
502 return [(k,self._env.get(k)) for k in self._exported]
503
504 def split_fields(self, word):
505 if not self._ifs_ws or not word:
506 return [word]
507 return re.split(self._ifs_re, word)
508
509 def _update_ifs(self, value):
510 """Update the split_fields related variables when IFS character set is
511 changed.
512 """
513 # TODO: handle NULL IFS
514
515 # Separate characters in whitespace and non-whitespace
516 chars = set(value)
517 ws = [c for c in chars if c in _IFS_WHITESPACES]
518 nws = [c for c in chars if c not in _IFS_WHITESPACES]
519
520 # Keep whitespaces in a string for left and right stripping
521 self._ifs_ws = ''.join(ws)
522
523 # Build a regexp to split fields
524 trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
525 if nws:
526 # First, the single non-whitespace occurrence.
527 nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
528 nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
529 else:
530 # Then mix all parts with quantifiers
531 nws = trailing + '+'
532 self._ifs_re = re.compile(nws)
533
534 def has_opt(self, opt, val=None):
535 return (opt, val) in self._opt
536
537 def set_opt(self, opt, val=None):
538 self._opt.add((opt, val))
539
540 def find_in_path(self, name, pwd=False):
541 path = self._env.get('PATH', '').split(os.pathsep)
542 if pwd:
543 path[:0] = [self['PWD']]
544 if os.name == 'nt':
545 return win32_find_in_path(name, self._env.get('PATH', ''))
546 else:
547 raise NotImplementedError()
548
549 def define_function(self, name, body):
550 if not is_name(name):
551 raise ShellSyntaxError('%s is not a valid function name' % repr(name))
552 self._functions[name] = body
553
554 def remove_function(self, name):
555 del self._functions[name]
556
557 def is_function(self, name):
558 return name in self._functions
559
560 def get_function(self, name):
561 return self._functions.get(name)
562
563
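The _update_ifs() construction above is easiest to see with concrete values: runs of IFS whitespace separate fields, while a single non-whitespace IFS character, optionally padded by whitespace, also separates fields. A self-contained sketch re-deriving the same regular expression:

import re

_IFS_WHITESPACES = (' ', '\t', '\n')

def ifs_regexp(ifs):
    # Same pattern construction as Environment._update_ifs().
    chars = set(ifs)
    ws = [c for c in chars if c in _IFS_WHITESPACES]
    nws = [c for c in chars if c not in _IFS_WHITESPACES]
    trailing = '[' + ''.join(re.escape(c) for c in ws) + ']'
    if nws:
        nwsclass = '[' + ''.join(re.escape(c) for c in nws) + ']'
        return re.compile('(?:%s*%s%s*|%s+)' % (trailing, nwsclass, trailing, trailing))
    return re.compile(trailing + '+')

print(re.split(ifs_regexp(' \t\n'), 'a  b\tc'))   # ['a', 'b', 'c']
print(re.split(ifs_regexp(' :'), 'x : y z'))      # ['x', 'y', 'z']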
564name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
565name_charset = dict(zip(name_charset,name_charset))
566
567def match_name(s):
568 """Return the length in characters of the longest prefix made of name
569 allowed characters in s.
570 """
571 for i,c in enumerate(s):
572 if c not in name_charset:
573 return s[:i]
574 return s
575
576def is_name(s):
577 return len([c for c in s if c not in name_charset])<=0
578
579def is_special_param(c):
580 return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
581
582def utility_not_implemented(name, *args, **kwargs):
583 raise NotImplementedError('%s utility is not implemented' % name)
584
585
586class Utility:
587 """Define utilities properties:
588 func -- utility callable. See builtin module for utility samples.
589 is_special -- see XCU 2.8.
590 """
591 def __init__(self, func, is_special=0):
592 self.func = func
593 self.is_special = bool(is_special)
594
595
596def encodeargs(args):
597 def encodearg(s):
598 lines = base64.encodestring(s)
599 lines = [l.splitlines()[0] for l in lines]
600 return ''.join(lines)
601
602 s = pickle.dumps(args)
603 return encodearg(s)
604
605def decodeargs(s):
606 s = base64.decodestring(s)
607 return pickle.loads(s)
608
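encodeargs()/decodeargs() exist so that _asynclist() further down can hand the environment snapshot and an AST to a child pysh process as a single command-line token. The round trip is just pickling plus newline-free base64, roughly equivalent to this sketch (written with the modern pickle/base64 spellings rather than the cPickle/encodestring calls above):

import base64
import pickle

def encodeargs(args):
    # Pickle, then base64 without line breaks: safe as one argv element.
    return base64.b64encode(pickle.dumps(args)).decode('ascii')

def decodeargs(s):
    return pickle.loads(base64.b64decode(s))

payload = ({'PWD': '/tmp', 'PATH': '/usr/bin'}, ['echo', 'hello'])
token = encodeargs(payload)
print(decodeargs(token) == payload)   # True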
609
610class GlobError(Exception):
611 pass
612
613class Options:
614 def __init__(self):
615 # True if Mercurial operates with binary streams
616 self.hgbinary = True
617
618class Interpreter:
619 # Implementation is very basic: the execute() method just makes a DFS on the
620 # AST and executes nodes one by one. Nodes are tuples (name, obj) where name
621 # is a string identifier and obj the AST element returned by the parser.
622 #
623 # Handlers are named after the node identifiers.
624 # TODO: check node names and remove the switch in execute with some
625 # dynamic getattr() call to find node handlers.
626 """Shell interpreter.
627
628 The following debugging flags can be passed:
629 debug-parsing - enable PLY debugging.
630 debug-tree - print the generated AST.
631 debug-cmd - trace command execution before word expansion, plus exit status.
632 debug-utility - trace utility execution.
633 """
634
635 # List supported commands.
636 COMMANDS = {
637 'cat': Utility(builtin.utility_cat,),
638 'cd': Utility(builtin.utility_cd,),
639 ':': Utility(builtin.utility_colon,),
640 'echo': Utility(builtin.utility_echo),
641 'env': Utility(builtin.utility_env),
642 'exit': Utility(builtin.utility_exit),
643 'export': Utility(builtin.builtin_export, is_special=1),
644 'egrep': Utility(builtin.utility_egrep),
645 'fgrep': Utility(builtin.utility_fgrep),
646 'gunzip': Utility(builtin.utility_gunzip),
647 'kill': Utility(builtin.utility_kill),
648 'mkdir': Utility(builtin.utility_mkdir),
649 'netstat': Utility(builtin.utility_netstat),
650 'printf': Utility(builtin.utility_printf),
651 'pwd': Utility(builtin.utility_pwd),
652 'return': Utility(builtin.builtin_return, is_special=1),
653 'sed': Utility(builtin.utility_sed,),
654 'set': Utility(builtin.builtin_set,),
655 'shift': Utility(builtin.builtin_shift,),
656 'sleep': Utility(builtin.utility_sleep,),
657 'sort': Utility(builtin.utility_sort,),
658 'trap': Utility(builtin.builtin_trap, is_special=1),
659 'true': Utility(builtin.utility_true),
660 'unset': Utility(builtin.builtin_unset, is_special=1),
661 'wait': Utility(builtin.builtin_wait, is_special=1),
662 }
663
664 def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
665 stdout=None, stderr=None, opts=Options()):
666 self._env = env
667 if self._env is None:
668 self._env = Environment(pwd)
669 self._children = {}
670
671 self._redirs = redirs
672 self._close_redirs = False
673
674 if self._redirs is None:
675 if stdin is None:
676 stdin = sys.stdin
677 if stdout is None:
678 stdout = sys.stdout
679 if stderr is None:
680 stderr = sys.stderr
681 stdin = FileWrapper('r', stdin, False)
682 stdout = FileWrapper('w', stdout, False)
683 stderr = FileWrapper('w', stderr, False)
684 self._redirs = Redirections(stdin, stdout, stderr)
685 self._close_redirs = True
686
687 self._debugflags = list(debugflags)
688 self._logfile = sys.stderr
689 self._options = opts
690
691 def close(self):
692 """Must be called when the interpreter is no longer used."""
693 script = self._env.traps.get('EXIT')
694 if script:
695 try:
696 self.execute_script(script=script)
697 except:
698 pass
699
700 if self._redirs is not None and self._close_redirs:
701 self._redirs.close()
702 self._redirs = None
703
704 def log(self, s):
705 self._logfile.write(s)
706 self._logfile.flush()
707
708 def __getitem__(self, key):
709 return self._env[key]
710
711 def __setitem__(self, key, value):
712 self._env[key] = value
713
714 def options(self):
715 return self._options
716
717 def redirect(self, redirs, ios):
718 def add_redir(io):
719 if isinstance(io, pyshyacc.IORedirect):
720 redirs.add(self, io.op, io.filename, io.io_number)
721 else:
722 redirs.add_here_document(self, io.name, io.content, io.io_number)
723
724 map(add_redir, ios)
725 return redirs
726
727 def execute_script(self, script=None, ast=None, sourced=False,
728 scriptpath=None):
729 """If script is not None, parse the input. Otherwise takes the supplied
730 AST. Then execute the AST.
731 Return the script exit status.
732 """
733 try:
734 if scriptpath is not None:
735 self._env['0'] = os.path.abspath(scriptpath)
736
737 if script is not None:
738 debug_parsing = ('debug-parsing' in self._debugflags)
739 cmds, script = pyshyacc.parse(script, True, debug_parsing)
740 if 'debug-tree' in self._debugflags:
741 pyshyacc.print_commands(cmds, self._logfile)
742 self._logfile.flush()
743 else:
744 cmds, script = ast, ''
745
746 status = 0
747 for cmd in cmds:
748 try:
749 status = self.execute(cmd)
750 except ExitSignal as e:
751 if sourced:
752 raise
753 status = int(e.args[0])
754 return status
755 except ShellError:
756 self._env['?'] = 1
757 raise
758 if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
759 self.log('returncode ' + str(status)+ '\n')
760 return status
761 except CommandNotFound as e:
762 print >>self._redirs.stderr, str(e)
763 self._redirs.stderr.flush()
764 # Command not found by non-interactive shell
765 # return 127
766 raise
767 except RedirectionError as e:
768 # TODO: should be handled depending on the utility status
769 print >>self._redirs.stderr, str(e)
770 self._redirs.stderr.flush()
771 # Command not found by non-interactive shell
772 # return 127
773 raise
774
775 def dotcommand(self, env, args):
776 if len(args) < 1:
777 raise ShellError('. expects at least one argument')
778 path = args[0]
779 if '/' not in path:
780 found = env.find_in_path(args[0], True)
781 if found:
782 path = found[0]
783 script = file(path).read()
784 return self.execute_script(script=script, sourced=True)
785
786 def execute(self, token, redirs=None):
787 """Execute and AST subtree with supplied redirections overriding default
788 interpreter ones.
789 Return the exit status.
790 """
791 if not token:
792 return 0
793
794 if redirs is None:
795 redirs = self._redirs
796
797 if isinstance(token, list):
798 # Commands sequence
799 res = 0
800 for t in token:
801 res = self.execute(t, redirs)
802 return res
803
804 type, value = token
805 status = 0
806 if type=='simple_command':
807 redirs_copy = redirs.clone()
808 try:
809 # TODO: define and handle command return values
810 # TODO: implement set -e
811 status = self._execute_simple_command(value, redirs_copy)
812 finally:
813 redirs_copy.close()
814 elif type=='pipeline':
815 status = self._execute_pipeline(value, redirs)
816 elif type=='and_or':
817 status = self._execute_and_or(value, redirs)
818 elif type=='for_clause':
819 status = self._execute_for_clause(value, redirs)
820 elif type=='while_clause':
821 status = self._execute_while_clause(value, redirs)
822 elif type=='function_definition':
823 status = self._execute_function_definition(value, redirs)
824 elif type=='brace_group':
825 status = self._execute_brace_group(value, redirs)
826 elif type=='if_clause':
827 status = self._execute_if_clause(value, redirs)
828 elif type=='subshell':
829 status = self.subshell(ast=value.cmds, redirs=redirs)
830 elif type=='async':
831 status = self._asynclist(value)
832 elif type=='redirect_list':
833 redirs_copy = self.redirect(redirs.clone(), value.redirs)
834 try:
835 status = self.execute(value.cmd, redirs_copy)
836 finally:
837 redirs_copy.close()
838 else:
839 raise NotImplementedError('Unsupported token type ' + type)
840
841 if status < 0:
842 status = 255
843 return status
844
845 def _execute_if_clause(self, if_clause, redirs):
846 cond_status = self.execute(if_clause.cond, redirs)
847 if cond_status==0:
848 return self.execute(if_clause.if_cmds, redirs)
849 else:
850 return self.execute(if_clause.else_cmds, redirs)
851
852 def _execute_brace_group(self, group, redirs):
853 status = 0
854 for cmd in group.cmds:
855 status = self.execute(cmd, redirs)
856 return status
857
858 def _execute_function_definition(self, fundef, redirs):
859 self._env.define_function(fundef.name, fundef.body)
860 return 0
861
862 def _execute_while_clause(self, while_clause, redirs):
863 status = 0
864 while 1:
865 cond_status = 0
866 for cond in while_clause.condition:
867 cond_status = self.execute(cond, redirs)
868
869 if cond_status:
870 break
871
872 for cmd in while_clause.cmds:
873 status = self.execute(cmd, redirs)
874
875 return status
876
877 def _execute_for_clause(self, for_clause, redirs):
878 if not is_name(for_clause.name):
879 raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
880 items = mappend(self.expand_token, for_clause.items)
881
882 status = 0
883 for item in items:
884 self._env[for_clause.name] = item
885 for cmd in for_clause.cmds:
886 status = self.execute(cmd, redirs)
887 return status
888
889 def _execute_and_or(self, or_and, redirs):
890 res = self.execute(or_and.left, redirs)
891 if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
892 res = self.execute(or_and.right, redirs)
893 return res
894
895 def _execute_pipeline(self, pipeline, redirs):
896 if len(pipeline.commands)==1:
897 status = self.execute(pipeline.commands[0], redirs)
898 else:
899 # Execute all commands one after the other
900 status = 0
901 inpath, outpath = None, None
902 try:
903 # Command inputs and outputs cannot really be plugged together as done
904 # by a real shell. Run commands sequentially and chain their
905 # input/output through temporary files.
906 tmpfd, inpath = tempfile.mkstemp()
907 os.close(tmpfd)
908 tmpfd, outpath = tempfile.mkstemp()
909 os.close(tmpfd)
910
911 inpath = win32_to_unix_path(inpath)
912 outpath = win32_to_unix_path(outpath)
913
914 for i, cmd in enumerate(pipeline.commands):
915 call_redirs = redirs.clone()
916 try:
917 if i!=0:
918 call_redirs.add(self, '<', inpath)
919 if i!=len(pipeline.commands)-1:
920 call_redirs.add(self, '>', outpath)
921
922 status = self.execute(cmd, call_redirs)
923
924 # Chain inputs/outputs
925 inpath, outpath = outpath, inpath
926 finally:
927 call_redirs.close()
928 finally:
929 if inpath: os.remove(inpath)
930 if outpath: os.remove(outpath)
931
932 if pipeline.reverse_status:
933 status = int(not status)
934 self._env['?'] = status
935 return status
936
937 def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
938 assert interp is self
939
940 func = env.get_function(name)
941 #Set positional parameters
942 prevargs = None
943 try:
944 prevargs = env.set_positional_args(args)
945 try:
946 redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
947 try:
948 status = self.execute(func, redirs)
949 finally:
950 redirs.close()
951 except ReturnSignal as e:
952 status = int(e.args[0])
953 env['?'] = status
954 return status
955 finally:
956 #Reset positional parameters
957 if prevargs is not None:
958 env.set_positional_args(prevargs)
959
960 def _execute_simple_command(self, token, redirs):
961 """Can raise ReturnSignal when return builtin is called, ExitSignal when
962 exit is called, and other shell exceptions upon builtin failures.
963 """
964 debug_command = 'debug-cmd' in self._debugflags
965 if debug_command:
966 self.log('word' + repr(token.words) + '\n')
967 self.log('assigns' + repr(token.assigns) + '\n')
968 self.log('redirs' + repr(token.redirs) + '\n')
969
970 is_special = None
971 env = self._env
972
973 try:
974 # Word expansion
975 args = []
976 for word in token.words:
977 args += self.expand_token(word)
978 if is_special is None and args:
979 is_special = env.is_function(args[0]) or \
980 (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
981
982 if debug_command:
983 self.log('_execute_simple_command' + str(args) + '\n')
984
985 if not args:
986 # Redirections happen in a subshell
987 redirs = redirs.clone()
988 elif not is_special:
989 env = self._env.clone()
990
991 # Redirections
992 self.redirect(redirs, token.redirs)
993
994 # Variables assignments
995 res = 0
996 for type,(k,v) in token.assigns:
997 status, expanded = self.expand_variable((k,v))
998 if status is not None:
999 res = status
1000 if args:
1001 env.export(k, expanded)
1002 else:
1003 env[k] = expanded
1004
1005 if args and args[0] in ('.', 'source'):
1006 res = self.dotcommand(env, args[1:])
1007 elif args:
1008 if args[0] in self.COMMANDS:
1009 command = self.COMMANDS[args[0]]
1010 elif env.is_function(args[0]):
1011 command = Utility(self._execute_function, is_special=True)
1012 else:
1013 if not '/' in args[0].replace('\\', '/'):
1014 cmd = env.find_in_path(args[0])
1015 if not cmd:
1016 # TODO: test error code on unknown command => 127
1017 raise CommandNotFound('Unknown command: "%s"' % args[0])
1018 else:
1019 # Handle commands like '/cygdrive/c/foo.bat'
1020 cmd = cygwin_to_windows_path(args[0])
1021 if not os.path.exists(cmd):
1022 raise CommandNotFound('%s: No such file or directory' % args[0])
1023 shebang = resolve_shebang(cmd)
1024 if shebang:
1025 cmd = shebang
1026 else:
1027 cmd = [cmd]
1028 args[0:1] = cmd
1029 command = Utility(builtin.run_command)
1030
1031 # Command execution
1032 if 'debug-cmd' in self._debugflags:
1033 self.log('redirections ' + str(redirs) + '\n')
1034
1035 res = command.func(args[0], args[1:], self, env,
1036 redirs.stdin(), redirs.stdout(),
1037 redirs.stderr(), self._debugflags)
1038
1039 if self._env.has_opt('-x'):
1040 # Trace command execution in shell environment
1041 # BUG: would be hard to reproduce a real shell behaviour since
1042 # the AST is not annotated with source lines/tokens.
1043 self._redirs.stdout().write(' '.join(args))
1044
1045 except ReturnSignal:
1046 raise
1047 except ShellError as e:
1048 if is_special or isinstance(e, (ExitSignal,
1049 ShellSyntaxError, ExpansionError)):
1050 raise e
1051 self._redirs.stderr().write(str(e)+'\n')
1052 return 1
1053
1054 return res
1055
1056 def expand_token(self, word):
1057 """Expand a word as specified in [2.6 Word Expansions]. Return the list
1058 of expanded words.
1059 """
1060 status, wtrees = self._expand_word(word)
1061 return map(pyshlex.wordtree_as_string, wtrees)
1062
1063 def expand_variable(self, word):
1064 """Return a status code (or None if no command expansion occurred)
1065 and a single word.
1066 """
1067 status, wtrees = self._expand_word(word, pathname=False, split=False)
1068 words = map(pyshlex.wordtree_as_string, wtrees)
1069 assert len(words)==1
1070 return status, words[0]
1071
1072 def expand_here_document(self, word):
1073 """Return the expanded document as a single word. The here document is
1074 assumed to be unquoted.
1075 """
1076 status, wtrees = self._expand_word(word, pathname=False,
1077 split=False, here_document=True)
1078 words = map(pyshlex.wordtree_as_string, wtrees)
1079 assert len(words)==1
1080 return words[0]
1081
1082 def expand_redirection(self, word):
1083 """Return a single word."""
1084 return self.expand_variable(word)[1]
1085
1086 def get_env(self):
1087 return self._env
1088
1089 def _expand_word(self, token, pathname=True, split=True, here_document=False):
1090 wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
1091
1092 # TODO: implement tilde expansion
1093 def expand(wtree):
1094 """Return a pseudo wordtree: the tree or its subelements can be empty
1095 lists when no value results from the expansion.
1096 """
1097 status = None
1098 for part in wtree:
1099 if not isinstance(part, list):
1100 continue
1101 if part[0]in ("'", '\\'):
1102 continue
1103 elif part[0] in ('`', '$('):
1104 status, result = self._expand_command(part)
1105 part[:] = result
1106 elif part[0] in ('$', '${'):
1107 part[:] = self._expand_parameter(part, wtree[0]=='"', split)
1108 elif part[0] in ('', '"'):
1109 status, result = expand(part)
1110 part[:] = result
1111 else:
1112 raise NotImplementedError('%s expansion is not implemented'
1113 % part[0])
1114 # [] is returned when an expansion results in no field,
1115 # like an empty $@
1116 wtree = [p for p in wtree if p != []]
1117 if len(wtree) < 3:
1118 return status, []
1119 return status, wtree
1120
1121 status, wtree = expand(wtree)
1122 if len(wtree) == 0:
1123 return status, wtree
1124 wtree = pyshlex.normalize_wordtree(wtree)
1125
1126 if split:
1127 wtrees = self._split_fields(wtree)
1128 else:
1129 wtrees = [wtree]
1130
1131 if pathname:
1132 wtrees = mappend(self._expand_pathname, wtrees)
1133
1134 wtrees = map(self._remove_quotes, wtrees)
1135 return status, wtrees
1136
1137 def _expand_command(self, wtree):
1138 # BUG: there is something to do with backslashes and quoted
1139 # characters here
1140 command = pyshlex.wordtree_as_string(wtree[1:-1])
1141 status, output = self.subshell_output(command)
1142 return status, ['', output, '']
1143
1144 def _expand_parameter(self, wtree, quoted=False, split=False):
1145 """Return a valid wtree or an empty list when no parameter results."""
1146 # Get the parameter name
1147 # TODO: implement weird expansion rules with ':'
1148 name = pyshlex.wordtree_as_string(wtree[1:-1])
1149 if not is_name(name) and not is_special_param(name):
1150 raise ExpansionError('Bad substitution "%s"' % name)
1151 # TODO: implement special parameters
1152 if name in ('@', '*'):
1153 args = self._env.get_positional_args()
1154 if len(args) == 0:
1155 return []
1156 if len(args)<2:
1157 return ['', ''.join(args), '']
1158
1159 sep = self._env.get('IFS', '')[:1]
1160 if split and quoted and name=='@':
1161 # Introduce a new token to tell the caller that these parameters
1162 # cause a split as specified in 2.5.2
1163 return ['@'] + args + ['']
1164 else:
1165 return ['', sep.join(args), '']
1166
1167 return ['', self._env.get(name, ''), '']
1168
1169 def _split_fields(self, wtree):
1170 def is_empty(split):
1171 return split==['', '', '']
1172
1173 def split_positional(quoted):
1174 # Return a list of wtrees split according to positional parameter rules.
1175 # All remaining '@' groups are removed.
1176 assert quoted[0]=='"'
1177
1178 splits = [[]]
1179 for part in quoted:
1180 if not isinstance(part, list) or part[0]!='@':
1181 splits[-1].append(part)
1182 else:
1183 # Empty or single-argument lists were dealt with already
1184 assert len(part)>3
1185 # First argument must join with the beginning part of the original word
1186 splits[-1].append(part[1])
1187 # Create double-quoted expressions for every argument after the first
1188 for arg in part[2:-1]:
1189 splits[-1].append('"')
1190 splits.append(['"', arg])
1191 return splits
1192
1193 # At this point, all expansions but pathnames have occurred. Only quoted
1194 # and positional sequences remain. Thus, all candidates for field splitting
1195 # are in the tree root, or are positional splits ('@') and lie in root
1196 # children.
1197 if not wtree or wtree[0] not in ('', '"'):
1198 # The whole token is quoted or empty, nothing to split
1199 return [wtree]
1200
1201 if wtree[0]=='"':
1202 wtree = ['', wtree, '']
1203
1204 result = [['', '']]
1205 for part in wtree[1:-1]:
1206 if isinstance(part, list):
1207 if part[0]=='"':
1208 splits = split_positional(part)
1209 if len(splits)<=1:
1210 result[-1] += [part, '']
1211 else:
1212 # Terminate the current split
1213 result[-1] += [splits[0], '']
1214 result += splits[1:-1]
1215 # Create a new split
1216 result += [['', splits[-1], '']]
1217 else:
1218 result[-1] += [part, '']
1219 else:
1220 splits = self._env.split_fields(part)
1221 if len(splits)<=1:
1222 # No split
1223 result[-1][-1] += part
1224 else:
1225 # Terminate the current resulting part and create a new one
1226 result[-1][-1] += splits[0]
1227 result[-1].append('')
1228 result += [['', r, ''] for r in splits[1:-1]]
1229 result += [['', splits[-1]]]
1230 result[-1].append('')
1231
1232 # Leading and trailing empty groups come from leading/trailing blanks
1233 if result and is_empty(result[-1]):
1234 result[-1:] = []
1235 if result and is_empty(result[0]):
1236 result[:1] = []
1237 return result
1238
1239 def _expand_pathname(self, wtree):
1240 """See [2.6.6 Pathname Expansion]."""
1241 if self._env.has_opt('-f'):
1242 return [wtree]
1243
1244 # All expansions have been performed, only quoted sequences should remain
1245 # in the tree. Generate the pattern by folding the tree, escaping special
1246 # characters when they appear quoted
1247 special_chars = '*?[]'
1248
1249 def make_pattern(wtree):
1250 subpattern = []
1251 for part in wtree[1:-1]:
1252 if isinstance(part, list):
1253 part = make_pattern(part)
1254 elif wtree[0]!='':
1255 for c in part:
1256 # Meta-characters cannot be quoted
1257 if c in special_chars:
1258 raise GlobError()
1259 subpattern.append(part)
1260 return ''.join(subpattern)
1261
1262 def pwd_glob(pattern):
1263 cwd = os.getcwd()
1264 os.chdir(self._env['PWD'])
1265 try:
1266 return glob.glob(pattern)
1267 finally:
1268 os.chdir(cwd)
1269
1270 #TODO: check working directory issues here wrt relative patterns
1271 try:
1272 pattern = make_pattern(wtree)
1273 paths = pwd_glob(pattern)
1274 except GlobError:
1275 # BUG: Meta-characters were found in quoted sequences. They should
1276 # have been used literally, but this is unsupported in the current glob module.
1277 # Instead we consider the whole tree must be used literally and
1278 # therefore there is no point in globbing. This is wrong when meta
1279 # characters are mixed with quoted meta in the same pattern like:
1280 # < foo*"py*" >
1281 paths = []
1282
1283 if not paths:
1284 return [wtree]
1285 return [['', path, ''] for path in paths]
1286
1287 def _remove_quotes(self, wtree):
1288 """See [2.6.7 Quote Removal]."""
1289
1290 def unquote(wtree):
1291 unquoted = []
1292 for part in wtree[1:-1]:
1293 if isinstance(part, list):
1294 part = unquote(part)
1295 unquoted.append(part)
1296 return ''.join(unquoted)
1297
1298 return ['', unquote(wtree), '']
1299
1300 def subshell(self, script=None, ast=None, redirs=None):
1301 """Execute the script or AST in a subshell, with inherited redirections
1302 if redirs is not None.
1303 """
1304 if redirs:
1305 sub_redirs = redirs
1306 else:
1307 sub_redirs = self._redirs.clone()
1308
1309 subshell = None
1310 try:
1311 subshell = Interpreter(None, self._debugflags, self._env.clone(True),
1312 sub_redirs, opts=self._options)
1313 return subshell.execute_script(script, ast)
1314 finally:
1315 if not redirs: sub_redirs.close()
1316 if subshell: subshell.close()
1317
1318 def subshell_output(self, script):
1319 """Execute the script in a subshell and return the captured output."""
1320 # Create temporary file to capture subshell output
1321 tmpfd, tmppath = tempfile.mkstemp()
1322 try:
1323 tmpfile = os.fdopen(tmpfd, 'wb')
1324 stdout = FileWrapper('w', tmpfile)
1325
1326 redirs = Redirections(self._redirs.stdin().dup(),
1327 stdout,
1328 self._redirs.stderr().dup())
1329 try:
1330 status = self.subshell(script=script, redirs=redirs)
1331 finally:
1332 redirs.close()
1333 redirs = None
1334
1335 # Extract subshell standard output
1336 tmpfile = open(tmppath, 'rb')
1337 try:
1338 output = tmpfile.read()
1339 return status, output.rstrip('\n')
1340 finally:
1341 tmpfile.close()
1342 finally:
1343 os.remove(tmppath)
1344
1345 def _asynclist(self, cmd):
1346 args = (self._env.get_variables(), cmd)
1347 arg = encodeargs(args)
1348 assert len(args) < 30*1024
1349 cmd = ['pysh.bat', '--ast', '-c', arg]
1350 p = subprocess.Popen(cmd, cwd=self._env['PWD'])
1351 self._children[p.pid] = p
1352 self._env['!'] = p.pid
1353 return 0
1354
1355 def wait(self, pids=None):
1356 if not pids:
1357 pids = self._children.keys()
1358
1359 status = 127
1360 for pid in pids:
1361 if pid not in self._children:
1362 continue
1363 p = self._children.pop(pid)
1364 status = p.wait()
1365
1366 return status
1367
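Taken together, driving the interpreter directly looks roughly like this (a sketch assuming the pysh package directory is on sys.path and that the script only uses utilities from the COMMANDS table; the constructor and execute_script() signatures are the ones defined above):

import os

import interp   # this module; assumes the pysh directory is importable

shell = interp.Interpreter(os.getcwd(), debugflags=['debug-cmd'])
try:
    # Parsed by pyshyacc, then run through the execute() dispatch above.
    status = shell.execute_script(script='MSG=hello\necho "$MSG"\n')
    print('exit status: %d' % status)
finally:
    shell.close()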
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
new file mode 100644
index 0000000000..b1831c22a7
--- /dev/null
+++ b/bitbake/lib/bb/pysh/lsprof.py
@@ -0,0 +1,116 @@
1#! /usr/bin/env python
2
3import sys
4from _lsprof import Profiler, profiler_entry
5
6__all__ = ['profile', 'Stats']
7
8def profile(f, *args, **kwds):
9 """XXX docstring"""
10 p = Profiler()
11 p.enable(subcalls=True, builtins=True)
12 try:
13 f(*args, **kwds)
14 finally:
15 p.disable()
16 return Stats(p.getstats())
17
18
19class Stats(object):
20 """XXX docstring"""
21
22 def __init__(self, data):
23 self.data = data
24
25 def sort(self, crit="inlinetime"):
26        """Sort the entries (and their sub-call entries) by the given profiler_entry field, e.g. 'inlinetime' or 'totaltime'."""
27 if crit not in profiler_entry.__dict__:
28 raise ValueError("Can't sort by %s" % crit)
29 self.data.sort(lambda b, a: cmp(getattr(a, crit),
30 getattr(b, crit)))
31 for e in self.data:
32 if e.calls:
33 e.calls.sort(lambda b, a: cmp(getattr(a, crit),
34 getattr(b, crit)))
35
36 def pprint(self, top=None, file=None, limit=None, climit=None):
37        """Print the entries to file (sys.stdout by default); top limits how many entries are shown, limit and climit bound the total and per-entry sub-call rows."""
38 if file is None:
39 file = sys.stdout
40 d = self.data
41 if top is not None:
42 d = d[:top]
43 cols = "% 12s %12s %11.4f %11.4f %s\n"
44 hcols = "% 12s %12s %12s %12s %s\n"
45 cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
46 file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
47 "Inline(ms)", "module:lineno(function)"))
48 count = 0
49 for e in d:
50 file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
51 e.inlinetime, label(e.code)))
52 count += 1
53 if limit is not None and count == limit:
54 return
55 ccount = 0
56 if e.calls:
57 for se in e.calls:
58 file.write(cols % ("+%s" % se.callcount, se.reccallcount,
59 se.totaltime, se.inlinetime,
60 "+%s" % label(se.code)))
61 count += 1
62 ccount += 1
63 if limit is not None and count == limit:
64 return
65 if climit is not None and ccount == climit:
66 break
67
68 def freeze(self):
69 """Replace all references to code objects with string
70 descriptions; this makes it possible to pickle the instance."""
71
72 # this code is probably rather ickier than it needs to be!
73 for i in range(len(self.data)):
74 e = self.data[i]
75 if not isinstance(e.code, str):
76 self.data[i] = type(e)((label(e.code),) + e[1:])
77 if e.calls:
78 for j in range(len(e.calls)):
79 se = e.calls[j]
80 if not isinstance(se.code, str):
81 e.calls[j] = type(se)((label(se.code),) + se[1:])
82
83_fn2mod = {}
84
85def label(code):
86 if isinstance(code, str):
87 return code
88 try:
89 mname = _fn2mod[code.co_filename]
90 except KeyError:
91 for k, v in sys.modules.items():
92 if v is None:
93 continue
94 if not hasattr(v, '__file__'):
95 continue
96 if not isinstance(v.__file__, str):
97 continue
98 if v.__file__.startswith(code.co_filename):
99 mname = _fn2mod[code.co_filename] = k
100 break
101 else:
102 mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
103
104 return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
105
106
107if __name__ == '__main__':
108 import os
109 sys.argv = sys.argv[1:]
110 if not sys.argv:
111 print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
112 sys.exit(2)
113 sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
114 stats = profile(execfile, sys.argv[0], globals(), locals())
115 stats.sort()
116 stats.pprint()
diff --git a/bitbake/lib/bb/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py
new file mode 100644
index 0000000000..b4e6145b51
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pysh.py
@@ -0,0 +1,167 @@
1# pysh.py - command processing for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8import optparse
9import os
10import sys
11
12import interp
13
14SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
15SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
16 help='A string that shall be interpreted by the shell as one or more commands')
17SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
18 help='Redirect script commands stdout and stderr to the specified file')
19# See utility_command in builtin.py about the reason for this flag.
20SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
 21                  help='Tell the interpreter that stdout and stderr are actually the same object, namely stdout')
22SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
23 help='Trace PLY execution')
24SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
25 help='Display the generated syntax tree.')
26SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
 27                  help='Trace command execution before parameter expansion, and exit status.')
28SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
 29                  help='Trace utility calls, after parameter expansion')
30SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
 31                  help='Treat the -c argument as encoded (environment, AST) data to execute in a subprocess')
32SH_OPT.add_option('--profile', action='store_true', default=False,
33 help='Profile pysh run')
34
35
36def split_args(args):
37 # Separate shell arguments from command ones
38    # Just stop at the first argument not starting with a dash. This is admittedly broken:
39    # it mishandles files whose names start with a dash and may mistake option values for the
40    # command file. Neither case is supposed to happen for now.
41 command_index = len(args)
42 for i,arg in enumerate(args):
43 if not arg.startswith('-'):
44 command_index = i
45 break
46
47 return args[:command_index], args[command_index:]
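# Editor's note (not part of the original patch): illustrative behaviour of the
# heuristic above:
#
#   split_args(['-c', 'echo hi'])            # -> (['-c'], ['echo hi'])
#   split_args(['--ast', '-c', 'X', 'y z'])  # -> (['--ast', '-c'], ['X', 'y z'])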
48
49
50def fixenv(env):
51 path = env.get('PATH')
52 if path is not None:
53 parts = path.split(os.pathsep)
54 # Remove Windows utilities from PATH, they are useless at best and
55 # some of them (find) may be confused with other utilities.
56 parts = [p for p in parts if 'system32' not in p.lower()]
57 env['PATH'] = os.pathsep.join(parts)
58 if env.get('HOME') is None:
59 # Several utilities, including cvsps, cannot work without
60 # a defined HOME directory.
61 env['HOME'] = os.path.expanduser('~')
62 return env
63
64def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
65 if os.environ.get('PYSH_TEXT') != '1':
66 import msvcrt
67 for fp in (sys.stdin, sys.stdout, sys.stderr):
68 msvcrt.setmode(fp.fileno(), os.O_BINARY)
69
70 hgbin = os.environ.get('PYSH_HGTEXT') != '1'
71
72 if debugflags is None:
73 debugflags = []
74 if options.debug_parsing: debugflags.append('debug-parsing')
75 if options.debug_utility: debugflags.append('debug-utility')
76 if options.debug_cmd: debugflags.append('debug-cmd')
77 if options.debug_tree: debugflags.append('debug-tree')
78
79 if env is None:
80 env = fixenv(dict(os.environ))
81 if cwd is None:
82 cwd = os.getcwd()
83
84 if not cmdargs:
85 # Nothing to do
86 return 0
87
88 ast = None
89 command_file = None
90 if options.command_string:
91 input = cmdargs[0]
92 if not options.ast:
93 input += '\n'
94 else:
95 args, input = interp.decodeargs(input), None
96 env, ast = args
97 cwd = env.get('PWD', cwd)
98 else:
99 command_file = cmdargs[0]
100 arguments = cmdargs[1:]
101
102 prefix = interp.resolve_shebang(command_file, ignoreshell=True)
103 if prefix:
104 input = ' '.join(prefix + [command_file] + arguments)
105 else:
106 # Read commands from file
107 f = file(command_file)
108 try:
109 # Trailing newline to help the parser
110 input = f.read() + '\n'
111 finally:
112 f.close()
113
114 redirect = None
115 try:
116 if options.redirected:
117 stdout = sys.stdout
118 stderr = stdout
119 elif options.redirect_to:
120 redirect = open(options.redirect_to, 'wb')
121 stdout = redirect
122 stderr = redirect
123 else:
124 stdout = sys.stdout
125 stderr = sys.stderr
126
127 # TODO: set arguments to environment variables
128 opts = interp.Options()
129 opts.hgbinary = hgbin
130 ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
131 opts=opts)
132 try:
133 # Export given environment in shell object
134 for k,v in env.iteritems():
135 ip.get_env().export(k,v)
136 return ip.execute_script(input, ast, scriptpath=command_file)
137 finally:
138 ip.close()
139 finally:
140 if redirect is not None:
141 redirect.close()
142
143def sh(cwd=None, args=None, debugflags=None, env=None):
144 if args is None:
145 args = sys.argv[1:]
146 shargs, cmdargs = split_args(args)
147 options, shargs = SH_OPT.parse_args(shargs)
148
149 if options.profile:
150 import lsprof
151 p = lsprof.Profiler()
152 p.enable(subcalls=True)
153 try:
154 return _sh(cwd, shargs, cmdargs, options, debugflags, env)
155 finally:
156 p.disable()
157 stats = lsprof.Stats(p.getstats())
158 stats.sort()
159 stats.pprint(top=10, file=sys.stderr, climit=5)
160 else:
161 return _sh(cwd, shargs, cmdargs, options, debugflags, env)
162
163def main():
164 sys.exit(sh())
165
166if __name__=='__main__':
167 main()
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py
new file mode 100644
index 0000000000..b30123675c
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pyshlex.py
@@ -0,0 +1,888 @@
1# pyshlex.py - PLY compatible lexer for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8# TODO:
9# - review all "char in 'abc'" snippets: the empty string can be matched
10# - test line continuations within quoted/expansion strings
11# - eof is buggy wrt sublexers
12# - the lexer cannot really work in pull mode, as that would require running
13# PLY in pull mode. It was designed to work incrementally and it would not be
14# that hard to enable pull mode.
15import re
16try:
17 s = set()
18 del s
19except NameError:
20    from sets import Set as set
21
22from ply import lex
23from sherrors import *
24
25class NeedMore(Exception):
26 pass
27
28def is_blank(c):
29 return c in (' ', '\t')
30
31_RE_DIGITS = re.compile(r'^\d+$')
32
33def are_digits(s):
34 return _RE_DIGITS.search(s) is not None
35
36_OPERATORS = dict([
37 ('&&', 'AND_IF'),
38 ('||', 'OR_IF'),
39 (';;', 'DSEMI'),
40 ('<<', 'DLESS'),
41 ('>>', 'DGREAT'),
42 ('<&', 'LESSAND'),
43 ('>&', 'GREATAND'),
44 ('<>', 'LESSGREAT'),
45 ('<<-', 'DLESSDASH'),
46 ('>|', 'CLOBBER'),
47 ('&', 'AMP'),
48 (';', 'COMMA'),
49 ('<', 'LESS'),
50 ('>', 'GREATER'),
51 ('(', 'LPARENS'),
52 (')', 'RPARENS'),
53])
54
55#Make a function to silence pychecker "Local variable shadows global"
56def make_partial_ops():
57 partials = {}
58 for k in _OPERATORS:
59 for i in range(1, len(k)+1):
60 partials[k[:i]] = None
61 return partials
62
63_PARTIAL_OPERATORS = make_partial_ops()
64
65def is_partial_op(s):
66 """Return True if s matches a non-empty subpart of an operator starting
67 at its first character.
68 """
69 return s in _PARTIAL_OPERATORS
70
71def is_op(s):
72 """If s matches an operator, returns the operator identifier. Return None
73 otherwise.
74 """
75 return _OPERATORS.get(s)
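# Editor's note (not part of the original patch): for illustration,
#
#   is_op('&&')          # -> 'AND_IF'
#   is_op('&&&')         # -> None
#   is_partial_op('<<')  # -> True ('<<' is a prefix of '<<-')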
76
77_RESERVEDS = dict([
78 ('if', 'If'),
79 ('then', 'Then'),
80 ('else', 'Else'),
81 ('elif', 'Elif'),
82 ('fi', 'Fi'),
83 ('do', 'Do'),
84 ('done', 'Done'),
85 ('case', 'Case'),
86 ('esac', 'Esac'),
87 ('while', 'While'),
88 ('until', 'Until'),
89 ('for', 'For'),
90 ('{', 'Lbrace'),
91 ('}', 'Rbrace'),
92 ('!', 'Bang'),
93 ('in', 'In'),
94 ('|', 'PIPE'),
95])
96
97def get_reserved(s):
98 return _RESERVEDS.get(s)
99
100_RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$')
101
102def is_name(s):
103 return _RE_NAME.search(s) is not None
104
105def find_chars(seq, chars):
106 for i,v in enumerate(seq):
107 if v in chars:
108 return i,v
109 return -1, None
110
111class WordLexer:
112 """WordLexer parses quoted or expansion expressions and returns an expression
113 tree. The input string can be any well-formed sequence beginning with a quoting
114 or expansion character. Embedded expressions are handled recursively. The
115 resulting tree is made of lists and strings. Lists represent quoted or
116 expansion expressions. The first element of each list is the opening separator,
117 the last one the closing separator. In between can be any number of strings
118 or lists for sub-expressions. Non-quoted/expansion expressions can be written
119 as strings or as lists with empty strings as starting and ending delimiters.
120 """
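    # Editor's note (not part of the original patch): an illustrative example of
    # the tree shape described above; the exact output shown is a sketch:
    #
    #   tree, remaining = WordLexer().add('"a${b}c"', eof=True)
    #   # tree is roughly ['"', 'a', ['${', 'b', '}'], 'c', '"'] and remaining is ''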
121
122 NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
123 NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET))
124
125 SPECIAL_CHARSET = '@*#?-$!0'
126
127 #Characters which can be escaped depends on the current delimiters
128 ESCAPABLE = {
129 '`': set(['$', '\\', '`']),
130 '"': set(['$', '\\', '`', '"']),
131 "'": set(),
132 }
133
134 def __init__(self, heredoc = False):
135 # _buffer is the unprocessed input characters buffer
136 self._buffer = []
137 # _stack is empty or contains a quoted list being processed
138 # (this is the DFS path to the quoted expression being evaluated).
139 self._stack = []
140 self._escapable = None
141 # True when parsing unquoted here documents
142 self._heredoc = heredoc
143
144 def add(self, data, eof=False):
145 """Feed the lexer with more data. If the quoted expression can be
146 delimited, return a tuple (expr, remaining) containing the expression
147 tree and the unconsumed data.
148 Otherwise, raise NeedMore.
149 """
150 self._buffer += list(data)
151 self._parse(eof)
152
153 result = self._stack[0]
154 remaining = ''.join(self._buffer)
155 self._stack = []
156 self._buffer = []
157 return result, remaining
158
159 def _is_escapable(self, c, delim=None):
160 if delim is None:
161 if self._heredoc:
162 # Backslashes work as if they were double quoted in unquoted
163 # here-documents
164 delim = '"'
165 else:
166 if len(self._stack)<=1:
167 return True
168 delim = self._stack[-2][0]
169
170 escapables = self.ESCAPABLE.get(delim, None)
171 return escapables is None or c in escapables
172
173 def _parse_squote(self, buf, result, eof):
174 if not buf:
175 raise NeedMore()
176 try:
177 pos = buf.index("'")
178 except ValueError:
179 raise NeedMore()
180 result[-1] += ''.join(buf[:pos])
181 result += ["'"]
182 return pos+1, True
183
184 def _parse_bquote(self, buf, result, eof):
185 if not buf:
186 raise NeedMore()
187
188 if buf[0]=='\n':
189 #Remove line continuations
190 result[:] = ['', '', '']
191 elif self._is_escapable(buf[0]):
192 result[-1] += buf[0]
193 result += ['']
194 else:
195 #Keep as such
196 result[:] = ['', '\\'+buf[0], '']
197
198 return 1, True
199
200 def _parse_dquote(self, buf, result, eof):
201 if not buf:
202 raise NeedMore()
203 pos, sep = find_chars(buf, '$\\`"')
204 if pos==-1:
205 raise NeedMore()
206
207 result[-1] += ''.join(buf[:pos])
208 if sep=='"':
209 result += ['"']
210 return pos+1, True
211 else:
212 #Keep everything until the separator and defer processing
213 return pos, False
214
215 def _parse_command(self, buf, result, eof):
216 if not buf:
217 raise NeedMore()
218
219 chars = '$\\`"\''
220 if result[0] == '$(':
221 chars += ')'
222 pos, sep = find_chars(buf, chars)
223 if pos == -1:
224 raise NeedMore()
225
226 result[-1] += ''.join(buf[:pos])
227 if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'):
228 result += [sep]
229 return pos+1, True
230 else:
231 return pos, False
232
233 def _parse_parameter(self, buf, result, eof):
234 if not buf:
235 raise NeedMore()
236
237 pos, sep = find_chars(buf, '$\\`"\'}')
238 if pos==-1:
239 raise NeedMore()
240
241 result[-1] += ''.join(buf[:pos])
242 if sep=='}':
243 result += [sep]
244 return pos+1, True
245 else:
246 return pos, False
247
248 def _parse_dollar(self, buf, result, eof):
249 sep = result[0]
250 if sep=='$':
251 if not buf:
252 #TODO: handle empty $
253 raise NeedMore()
254 if buf[0]=='(':
255 if len(buf)==1:
256 raise NeedMore()
257
258 if buf[1]=='(':
259 result[0] = '$(('
260 buf[:2] = []
261 else:
262 result[0] = '$('
263 buf[:1] = []
264
265 elif buf[0]=='{':
266 result[0] = '${'
267 buf[:1] = []
268 else:
269 if buf[0] in self.SPECIAL_CHARSET:
270 result[-1] = buf[0]
271 read = 1
272 else:
273 for read,c in enumerate(buf):
274 if c not in self.NAME_CHARSET:
275 break
276 else:
277 if not eof:
278 raise NeedMore()
279 read += 1
280
281 result[-1] += ''.join(buf[0:read])
282
283 if not result[-1]:
284 result[:] = ['', result[0], '']
285 else:
286 result += ['']
287 return read,True
288
289 sep = result[0]
290 if sep=='$(':
291 parsefunc = self._parse_command
292 elif sep=='${':
293 parsefunc = self._parse_parameter
294 else:
295 raise NotImplementedError(sep)
296
297 pos, closed = parsefunc(buf, result, eof)
298 return pos, closed
299
300 def _parse(self, eof):
301 buf = self._buffer
302 stack = self._stack
303 recurse = False
304
305 while 1:
306 if not stack or recurse:
307 if not buf:
308 raise NeedMore()
309 if buf[0] not in ('"\\`$\''):
310 raise ShellSyntaxError('Invalid quoted string sequence')
311 stack.append([buf[0], ''])
312 buf[:1] = []
313 recurse = False
314
315 result = stack[-1]
316 if result[0]=="'":
317 parsefunc = self._parse_squote
318 elif result[0]=='\\':
319 parsefunc = self._parse_bquote
320 elif result[0]=='"':
321 parsefunc = self._parse_dquote
322 elif result[0]=='`':
323 parsefunc = self._parse_command
324 elif result[0][0]=='$':
325 parsefunc = self._parse_dollar
326 else:
327 raise NotImplementedError()
328
329 read, closed = parsefunc(buf, result, eof)
330
331 buf[:read] = []
332 if closed:
333 if len(stack)>1:
334 #Merge in parent expression
335 parsed = stack.pop()
336 stack[-1] += [parsed]
337 stack[-1] += ['']
338 else:
339 break
340 else:
341 recurse = True
342
343def normalize_wordtree(wtree):
344 """Fold back every literal sequence (delimited with empty strings) into
345 parent sequence.
346 """
347 def normalize(wtree):
348 result = []
349 for part in wtree[1:-1]:
350 if isinstance(part, list):
351 part = normalize(part)
352 if part[0]=='':
353 #Move the part content back at current level
354 result += part[1:-1]
355 continue
356 elif not part:
357 #Remove empty strings
358 continue
359 result.append(part)
360 if not result:
361 result = ['']
362 return [wtree[0]] + result + [wtree[-1]]
363
364 return normalize(wtree)
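# Editor's note (not part of the original patch): folding example for the helper
# above:
#
#   normalize_wordtree(['', 'a', ['', 'b', ''], 'c', ''])
#   # -> ['', 'a', 'b', 'c', '']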
365
366
367def make_wordtree(token, here_document=False):
368 """Parse a delimited token and return a tree similar to the ones returned by
369 WordLexer. token may contain any combinations of expansion/quoted fields and
370 non-ones.
371 """
372 tree = ['']
373 remaining = token
374 delimiters = '\\$`'
375 if not here_document:
376 delimiters += '\'"'
377
378 while 1:
379 pos, sep = find_chars(remaining, delimiters)
380 if pos==-1:
381 tree += [remaining, '']
382 return normalize_wordtree(tree)
383 tree.append(remaining[:pos])
384 remaining = remaining[pos:]
385
386 try:
387 result, remaining = WordLexer(heredoc = here_document).add(remaining, True)
388 except NeedMore:
389        raise ShellSyntaxError('Invalid token "%s"' % token)
390 tree.append(result)
391
392
393def wordtree_as_string(wtree):
394 """Rewrite an expression tree generated by make_wordtree as string."""
395 def visit(node, output):
396 for child in node:
397 if isinstance(child, list):
398 visit(child, output)
399 else:
400 output.append(child)
401
402 output = []
403 visit(wtree, output)
404 return ''.join(output)
405
406
407def unquote_wordtree(wtree):
408 """Fold the word tree while removing quotes everywhere. Other expansion
409 sequences are joined as such.
410 """
411 def unquote(wtree):
412 unquoted = []
413 if wtree[0] in ('', "'", '"', '\\'):
414 wtree = wtree[1:-1]
415
416 for part in wtree:
417 if isinstance(part, list):
418 part = unquote(part)
419 unquoted.append(part)
420 return ''.join(unquoted)
421
422 return unquote(wtree)
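# Editor's note (not part of the original patch): make_wordtree and
# unquote_wordtree combined, on a token mixing quoted and unquoted parts:
#
#   wtree = make_wordtree('foo"bar"baz')
#   # wtree is roughly ['', 'foo', ['"', 'bar', '"'], 'baz', '']
#   unquote_wordtree(wtree)   # -> 'foobarbaz'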
423
424
425class HereDocLexer:
426 """HereDocLexer delimits the here-document content, from the newline following
427 the operator (not included) up to the closing delimiter line (included).
428 """
429 def __init__(self, op, delim):
430 assert op in ('<<', '<<-')
431 if not delim:
432 raise ShellSyntaxError('invalid here document delimiter %s' % str(delim))
433
434 self._op = op
435 self._delim = delim
436 self._buffer = []
437 self._token = []
438
439 def add(self, data, eof):
440 """If the here-document was delimited, return a tuple (content, remaining).
441 Raise NeedMore() otherwise.
442 """
443 self._buffer += list(data)
444 self._parse(eof)
445 token = ''.join(self._token)
446 remaining = ''.join(self._buffer)
447        self._token, self._buffer = [], []
448 return token, remaining
449
450 def _parse(self, eof):
451 while 1:
452 #Look for first unescaped newline. Quotes may be ignored
453 escaped = False
454 for i,c in enumerate(self._buffer):
455 if escaped:
456 escaped = False
457 elif c=='\\':
458 escaped = True
459 elif c=='\n':
460 break
461 else:
462 i = -1
463
464 if i==-1 or self._buffer[i]!='\n':
465 if not eof:
466 raise NeedMore()
467 #No more data, maybe the last line is closing delimiter
468 line = ''.join(self._buffer)
469 eol = ''
470 self._buffer[:] = []
471 else:
472 line = ''.join(self._buffer[:i])
473 eol = self._buffer[i]
474 self._buffer[:i+1] = []
475
476 if self._op=='<<-':
477 line = line.lstrip('\t')
478
479 if line==self._delim:
480 break
481
482 self._token += [line, eol]
483 if i==-1:
484 break
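# Editor's note (not part of the original patch): a sketch of HereDocLexer in
# action:
#
#   content, rest = HereDocLexer('<<', 'EOF').add('line1\nline2\nEOF\n', eof=True)
#   # content is 'line1\nline2\n' and rest is ''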
485
486class Token:
487 #TODO: check this is still in use
488 OPERATOR = 'OPERATOR'
489 WORD = 'WORD'
490
491 def __init__(self):
492 self.value = ''
493 self.type = None
494
495 def __getitem__(self, key):
496        #Behave like a two-element tuple
497 if key==0:
498 return self.type
499 if key==1:
500 return self.value
501 raise IndexError(key)
502
503
504class HereDoc:
505 def __init__(self, op, name=None):
506 self.op = op
507 self.name = name
508 self.pendings = []
509
510TK_COMMA = 'COMMA'
511TK_AMPERSAND = 'AMP'
512TK_OP = 'OP'
513TK_TOKEN = 'TOKEN'
514TK_COMMENT = 'COMMENT'
515TK_NEWLINE = 'NEWLINE'
516TK_IONUMBER = 'IO_NUMBER'
517TK_ASSIGNMENT = 'ASSIGNMENT_WORD'
518TK_HERENAME = 'HERENAME'
519
520class Lexer:
521 """Main lexer.
522
523    Call add() to feed it data; delimited tokens are delivered through on_token().
524 """
525 # Here-document handling makes the whole thing more complex because they basically
526 # force tokens to be reordered: here-content must come right after the operator
527 # and the here-document name, while some other tokens might be following the
528 # here-document expression on the same line.
529 #
530 # So, here-doc states are basically:
531 # *self._state==ST_NORMAL
532 # - self._heredoc.op is None: no here-document
533 # - self._heredoc.op is not None but name is: here-document operator matched,
534 # waiting for the document name/delimiter
535 # - self._heredoc.op and name are not None: here-document is ready, following
536 # tokens are being stored and will be pushed again when the document is
537 # completely parsed.
538 # *self._state==ST_HEREDOC
539 # - The here-document is being delimited by self._herelexer. Once it is done
540 # the content is pushed in front of the pending token list then all these
541 # tokens are pushed once again.
542 ST_NORMAL = 'ST_NORMAL'
543 ST_OP = 'ST_OP'
544 ST_BACKSLASH = 'ST_BACKSLASH'
545 ST_QUOTED = 'ST_QUOTED'
546 ST_COMMENT = 'ST_COMMENT'
547 ST_HEREDOC = 'ST_HEREDOC'
548
549 #Match end of backquote strings
550 RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)')
551
552 def __init__(self, parent_state = None):
553 self._input = []
554 self._pos = 0
555
556 self._token = ''
557 self._type = TK_TOKEN
558
559 self._state = self.ST_NORMAL
560 self._parent_state = parent_state
561 self._wordlexer = None
562
563 self._heredoc = HereDoc(None)
564 self._herelexer = None
565
566        ### The following attributes are not used for delimiting tokens and can safely
567        ### be changed after here-document detection (see _push_token)
568
569 # Count the number of tokens following a 'For' reserved word. Needed to
570 # return an 'In' reserved word if it comes in third place.
571 self._for_count = None
572
573 def add(self, data, eof=False):
574 """Feed the lexer with data.
575
576 When eof is set to True, returns unconsumed data or raise if the lexer
577 is in the middle of a delimiting operation.
578 Raise NeedMore otherwise.
579 """
580 self._input += list(data)
581 self._parse(eof)
582 self._input[:self._pos] = []
583 return ''.join(self._input)
584
585 def _parse(self, eof):
586 while self._state:
587 if self._pos>=len(self._input):
588 if not eof:
589 raise NeedMore()
590 elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC):
591 #Delimit the current token and leave cleanly
592 self._push_token('')
593 break
594 else:
595 #Let the sublexer handle the eof themselves
596 pass
597
598 if self._state==self.ST_NORMAL:
599 self._parse_normal()
600 elif self._state==self.ST_COMMENT:
601 self._parse_comment()
602 elif self._state==self.ST_OP:
603 self._parse_op(eof)
604 elif self._state==self.ST_QUOTED:
605 self._parse_quoted(eof)
606 elif self._state==self.ST_HEREDOC:
607 self._parse_heredoc(eof)
608 else:
609 assert False, "Unknown state " + str(self._state)
610
611 if self._heredoc.op is not None:
612 raise ShellSyntaxError('missing here-document delimiter')
613
614 def _parse_normal(self):
615 c = self._input[self._pos]
616 if c=='\n':
617 self._push_token(c)
618 self._token = c
619 self._type = TK_NEWLINE
620 self._push_token('')
621 self._pos += 1
622 elif c in ('\\', '\'', '"', '`', '$'):
623 self._state = self.ST_QUOTED
624 elif is_partial_op(c):
625 self._push_token(c)
626
627 self._type = TK_OP
628 self._token += c
629 self._pos += 1
630 self._state = self.ST_OP
631 elif is_blank(c):
632 self._push_token(c)
633
634 #Discard blanks
635 self._pos += 1
636 elif self._token:
637 self._token += c
638 self._pos += 1
639 elif c=='#':
640 self._state = self.ST_COMMENT
641 self._type = TK_COMMENT
642 self._pos += 1
643 else:
644 self._pos += 1
645 self._token += c
646
647 def _parse_op(self, eof):
648 assert self._token
649
650 while 1:
651 if self._pos>=len(self._input):
652 if not eof:
653 raise NeedMore()
654 c = ''
655 else:
656 c = self._input[self._pos]
657
658 op = self._token + c
659 if c and is_partial_op(op):
660 #Still parsing an operator
661 self._token = op
662 self._pos += 1
663 else:
664 #End of operator
665 self._push_token(c)
666 self._state = self.ST_NORMAL
667 break
668
669 def _parse_comment(self):
670 while 1:
671 if self._pos>=len(self._input):
672 raise NeedMore()
673
674 c = self._input[self._pos]
675 if c=='\n':
676 #End of comment, do not consume the end of line
677 self._state = self.ST_NORMAL
678 break
679 else:
680 self._token += c
681 self._pos += 1
682
683 def _parse_quoted(self, eof):
684 """Precondition: the starting backquote/dollar is still in the input queue."""
685 if not self._wordlexer:
686 self._wordlexer = WordLexer()
687
688 if self._pos<len(self._input):
689 #Transfer input queue character into the subparser
690 input = self._input[self._pos:]
691 self._pos += len(input)
692
693 wtree, remaining = self._wordlexer.add(input, eof)
694 self._wordlexer = None
695 self._token += wordtree_as_string(wtree)
696
697 #Put unparsed character back in the input queue
698 if remaining:
699 self._input[self._pos:self._pos] = list(remaining)
700 self._state = self.ST_NORMAL
701
702 def _parse_heredoc(self, eof):
703 assert not self._token
704
705 if self._herelexer is None:
706 self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name)
707
708 if self._pos<len(self._input):
709 #Transfer input queue character into the subparser
710 input = self._input[self._pos:]
711 self._pos += len(input)
712
713 self._token, remaining = self._herelexer.add(input, eof)
714
715 #Reset here-document state
716 self._herelexer = None
717 heredoc, self._heredoc = self._heredoc, HereDoc(None)
718 if remaining:
719 self._input[self._pos:self._pos] = list(remaining)
720 self._state = self.ST_NORMAL
721
722 #Push pending tokens
723 heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)]
724 for token, type, delim in heredoc.pendings:
725 self._token = token
726 self._type = type
727 self._push_token(delim)
728
729 def _push_token(self, delim):
730 if not self._token:
731 return 0
732
733 if self._heredoc.op is not None:
734 if self._heredoc.name is None:
735 #Here-document name
736 if self._type!=TK_TOKEN:
737 raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token)
738 self._heredoc.name = unquote_wordtree(make_wordtree(self._token))
739 self._type = TK_HERENAME
740 else:
741 #Capture all tokens until the newline starting the here-document
742 if self._type==TK_NEWLINE:
743 assert self._state==self.ST_NORMAL
744 self._state = self.ST_HEREDOC
745
746 self._heredoc.pendings.append((self._token, self._type, delim))
747 self._token = ''
748 self._type = TK_TOKEN
749 return 1
750
751 # BEWARE: do not change parser state from here to the end of the function:
752    # when parsing between a here-document operator and the end of the line,
753 # tokens are stored in self._heredoc.pendings. Therefore, they will not
754 # reach the section below.
755
756 #Check operators
757 if self._type==TK_OP:
758 #False positive because of partial op matching
759 op = is_op(self._token)
760 if not op:
761 self._type = TK_TOKEN
762 else:
763 #Map to the specific operator
764 self._type = op
765 if self._token in ('<<', '<<-'):
766 #Done here rather than in _parse_op because there is no need
767 #to change the parser state since we are still waiting for
768 #the here-document name
769 if self._heredoc.op is not None:
770 raise ShellSyntaxError("syntax error near token '%s'" % self._token)
771 assert self._heredoc.op is None
772 self._heredoc.op = self._token
773
774 if self._type==TK_TOKEN:
775 if '=' in self._token and not delim:
776 if self._token.startswith('='):
777 #Token is a WORD... a TOKEN that is.
778 pass
779 else:
780 prev = self._token[:self._token.find('=')]
781 if is_name(prev):
782 self._type = TK_ASSIGNMENT
783 else:
784 #Just a token (unspecified)
785 pass
786 else:
787 reserved = get_reserved(self._token)
788 if reserved is not None:
789 if reserved=='In' and self._for_count!=2:
790 #Sorry, not a reserved word after all
791 pass
792 else:
793 self._type = reserved
794 if reserved in ('For', 'Case'):
795 self._for_count = 0
796 elif are_digits(self._token) and delim in ('<', '>'):
797 #Detect IO_NUMBER
798 self._type = TK_IONUMBER
799 elif self._token==';':
800 self._type = TK_COMMA
801 elif self._token=='&':
802 self._type = TK_AMPERSAND
803 elif self._type==TK_COMMENT:
804 #Comments are not part of sh grammar, ignore them
805 self._token = ''
806 self._type = TK_TOKEN
807 return 0
808
809 if self._for_count is not None:
810 #Track token count in 'For' expression to detect 'In' reserved words.
811 #Can only be in third position, no need to go beyond
812 self._for_count += 1
813 if self._for_count==3:
814 self._for_count = None
815
816 self.on_token((self._token, self._type))
817 self._token = ''
818 self._type = TK_TOKEN
819 return 1
820
821 def on_token(self, token):
822 raise NotImplementedError
823
824
825tokens = [
826 TK_TOKEN,
827# To silence yacc unused token warnings
828# TK_COMMENT,
829 TK_NEWLINE,
830 TK_IONUMBER,
831 TK_ASSIGNMENT,
832 TK_HERENAME,
833]
834
835#Add specific operators
836tokens += _OPERATORS.values()
837#Add reserved words
838tokens += _RESERVEDS.values()
839
840class PLYLexer(Lexer):
841 """Bridge Lexer and PLY lexer interface."""
842 def __init__(self):
843 Lexer.__init__(self)
844 self._tokens = []
845 self._current = 0
846 self.lineno = 0
847
848 def on_token(self, token):
849 value, type = token
850
851 self.lineno = 0
852 t = lex.LexToken()
853 t.value = value
854 t.type = type
855 t.lexer = self
856 t.lexpos = 0
857 t.lineno = 0
858
859 self._tokens.append(t)
860
861 def is_empty(self):
862 return not bool(self._tokens)
863
864 #PLY compliant interface
865 def token(self):
866 if self._current>=len(self._tokens):
867 return None
868 t = self._tokens[self._current]
869 self._current += 1
870 return t
871
872
873def get_tokens(s):
874 """Parse the input string and return a tuple (tokens, unprocessed) where
875 tokens is a list of parsed tokens and unprocessed is the part of the input
876 string left untouched by the lexer.
877 """
878 lexer = PLYLexer()
879 untouched = lexer.add(s, True)
880 tokens = []
881 while 1:
882 token = lexer.token()
883 if token is None:
884 break
885 tokens.append(token)
886
887 tokens = [(t.value, t.type) for t in tokens]
888 return tokens, untouched
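# Editor's note (not part of the original patch): an illustrative end-to-end run
# of the lexer; the token types shown are a sketch of what the code above yields:
#
#   tokens, rest = get_tokens('echo foo > bar\n')
#   # tokens is roughly [('echo', 'TOKEN'), ('foo', 'TOKEN'), ('>', 'GREATER'),
#   #                    ('bar', 'TOKEN'), ('\n', 'NEWLINE')] and rest is ''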
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
new file mode 100644
index 0000000000..e8e80aac45
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pyshyacc.py
@@ -0,0 +1,779 @@
1# pyshyacc.py - PLY grammar definition for pysh
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""PLY grammar file.
9"""
10import os.path
11import sys
12
13import pyshlex
14tokens = pyshlex.tokens
15
16from ply import yacc
17import sherrors
18
19class IORedirect:
20 def __init__(self, op, filename, io_number=None):
21 self.op = op
22 self.filename = filename
23 self.io_number = io_number
24
25class HereDocument:
26 def __init__(self, op, name, content, io_number=None):
27 self.op = op
28 self.name = name
29 self.content = content
30 self.io_number = io_number
31
32def make_io_redirect(p):
33 """Make an IORedirect instance from the input 'io_redirect' production."""
34 name, io_number, io_target = p
35 assert name=='io_redirect'
36
37 if io_target[0]=='io_file':
38 io_type, io_op, io_file = io_target
39 return IORedirect(io_op, io_file, io_number)
40 elif io_target[0]=='io_here':
41 io_type, io_op, io_name, io_content = io_target
42 return HereDocument(io_op, io_name, io_content, io_number)
43 else:
44 assert False, "Invalid IO redirection token %s" % repr(io_type)
45
46class SimpleCommand:
47 """
48 assigns contains (name, value) pairs.
49 """
50 def __init__(self, words, redirs, assigns):
51 self.words = list(words)
52 self.redirs = list(redirs)
53 self.assigns = list(assigns)
54
55class Pipeline:
56 def __init__(self, commands, reverse_status=False):
57 self.commands = list(commands)
58 assert self.commands #Grammar forbids this
59 self.reverse_status = reverse_status
60
61class AndOr:
62 def __init__(self, op, left, right):
63 self.op = str(op)
64 self.left = left
65 self.right = right
66
67class ForLoop:
68 def __init__(self, name, items, cmds):
69 self.name = str(name)
70 self.items = list(items)
71 self.cmds = list(cmds)
72
73class WhileLoop:
74 def __init__(self, condition, cmds):
75 self.condition = list(condition)
76 self.cmds = list(cmds)
77
78class UntilLoop:
79 def __init__(self, condition, cmds):
80 self.condition = list(condition)
81 self.cmds = list(cmds)
82
83class FunDef:
84 def __init__(self, name, body):
85 self.name = str(name)
86 self.body = body
87
88class BraceGroup:
89 def __init__(self, cmds):
90 self.cmds = list(cmds)
91
92class IfCond:
93 def __init__(self, cond, if_cmds, else_cmds):
94 self.cond = list(cond)
95 self.if_cmds = if_cmds
96 self.else_cmds = else_cmds
97
98class Case:
99 def __init__(self, name, items):
100 self.name = name
101 self.items = items
102
103class SubShell:
104 def __init__(self, cmds):
105 self.cmds = cmds
106
107class RedirectList:
108 def __init__(self, cmd, redirs):
109 self.cmd = cmd
110 self.redirs = list(redirs)
111
112def get_production(productions, ptype):
113 """productions must be a list of production tuples like (name, obj) where
114 name is the production string identifier.
115    Return the first production named 'ptype'. Raise KeyError if none can be
116 found.
117 """
118 for production in productions:
119 if production is not None and production[0]==ptype:
120 return production
121 raise KeyError(ptype)
122
123#-------------------------------------------------------------------------------
124# PLY grammar definition
125#-------------------------------------------------------------------------------
126
127def p_multiple_commands(p):
128 """multiple_commands : newline_sequence
129 | complete_command
130 | multiple_commands complete_command"""
131 if len(p)==2:
132 if p[1] is not None:
133 p[0] = [p[1]]
134 else:
135 p[0] = []
136 else:
137 p[0] = p[1] + [p[2]]
138
139def p_complete_command(p):
140 """complete_command : list separator
141 | list"""
142 if len(p)==3 and p[2] and p[2][1] == '&':
143 p[0] = ('async', p[1])
144 else:
145 p[0] = p[1]
146
147def p_list(p):
148 """list : list separator_op and_or
149 | and_or"""
150 if len(p)==2:
151 p[0] = [p[1]]
152 else:
153 #if p[2]!=';':
154 # raise NotImplementedError('AND-OR list asynchronous execution is not implemented')
155 p[0] = p[1] + [p[3]]
156
157def p_and_or(p):
158 """and_or : pipeline
159 | and_or AND_IF linebreak pipeline
160 | and_or OR_IF linebreak pipeline"""
161 if len(p)==2:
162 p[0] = p[1]
163 else:
164 p[0] = ('and_or', AndOr(p[2], p[1], p[4]))
165
166def p_maybe_bang_word(p):
167 """maybe_bang_word : Bang"""
168 p[0] = ('maybe_bang_word', p[1])
169
170def p_pipeline(p):
171 """pipeline : pipe_sequence
172 | bang_word pipe_sequence"""
173 if len(p)==3:
174 p[0] = ('pipeline', Pipeline(p[2][1:], True))
175 else:
176 p[0] = ('pipeline', Pipeline(p[1][1:]))
177
178def p_pipe_sequence(p):
179 """pipe_sequence : command
180 | pipe_sequence PIPE linebreak command"""
181 if len(p)==2:
182 p[0] = ['pipe_sequence', p[1]]
183 else:
184 p[0] = p[1] + [p[4]]
185
186def p_command(p):
187 """command : simple_command
188 | compound_command
189 | compound_command redirect_list
190 | function_definition"""
191
192 if p[1][0] in ( 'simple_command',
193 'for_clause',
194 'while_clause',
195 'until_clause',
196 'case_clause',
197 'if_clause',
198 'function_definition',
199 'subshell',
200 'brace_group',):
201 if len(p) == 2:
202 p[0] = p[1]
203 else:
204 p[0] = ('redirect_list', RedirectList(p[1], p[2][1:]))
205 else:
206 raise NotImplementedError('%s command is not implemented' % repr(p[1][0]))
207
208def p_compound_command(p):
209 """compound_command : brace_group
210 | subshell
211 | for_clause
212 | case_clause
213 | if_clause
214 | while_clause
215 | until_clause"""
216 p[0] = p[1]
217
218def p_subshell(p):
219 """subshell : LPARENS compound_list RPARENS"""
220 p[0] = ('subshell', SubShell(p[2][1:]))
221
222def p_compound_list(p):
223 """compound_list : term
224 | newline_list term
225 | term separator
226 | newline_list term separator"""
227 productions = p[1:]
228 try:
229 sep = get_production(productions, 'separator')
230 if sep[1]!=';':
231 raise NotImplementedError()
232 except KeyError:
233 pass
234 term = get_production(productions, 'term')
235 p[0] = ['compound_list'] + term[1:]
236
237def p_term(p):
238 """term : term separator and_or
239 | and_or"""
240 if len(p)==2:
241 p[0] = ['term', p[1]]
242 else:
243 if p[2] is not None and p[2][1] == '&':
244 p[0] = ['term', ('async', p[1][1:])] + [p[3]]
245 else:
246 p[0] = p[1] + [p[3]]
247
248def p_maybe_for_word(p):
249 # Rearrange 'For' priority wrt TOKEN. See p_for_word
250 """maybe_for_word : For"""
251 p[0] = ('maybe_for_word', p[1])
252
253def p_for_clause(p):
254 """for_clause : for_word name linebreak do_group
255 | for_word name linebreak in sequential_sep do_group
256 | for_word name linebreak in wordlist sequential_sep do_group"""
257 productions = p[1:]
258 do_group = get_production(productions, 'do_group')
259 try:
260 items = get_production(productions, 'in')[1:]
261 except KeyError:
262 raise NotImplementedError('"in" omission is not implemented')
263
264 try:
265 items = get_production(productions, 'wordlist')[1:]
266 except KeyError:
267 items = []
268
269 name = p[2]
270 p[0] = ('for_clause', ForLoop(name, items, do_group[1:]))
271
272def p_name(p):
273 """name : token""" #Was NAME instead of token
274 p[0] = p[1]
275
276def p_in(p):
277 """in : In"""
278 p[0] = ('in', p[1])
279
280def p_wordlist(p):
281 """wordlist : wordlist token
282 | token"""
283 if len(p)==2:
284 p[0] = ['wordlist', ('TOKEN', p[1])]
285 else:
286 p[0] = p[1] + [('TOKEN', p[2])]
287
288def p_case_clause(p):
289 """case_clause : Case token linebreak in linebreak case_list Esac
290 | Case token linebreak in linebreak case_list_ns Esac
291 | Case token linebreak in linebreak Esac"""
292 if len(p) < 8:
293 items = []
294 else:
295 items = p[6][1:]
296 name = p[2]
297 p[0] = ('case_clause', Case(name, [c[1] for c in items]))
298
299def p_case_list_ns(p):
300 """case_list_ns : case_list case_item_ns
301 | case_item_ns"""
302 p_case_list(p)
303
304def p_case_list(p):
305 """case_list : case_list case_item
306 | case_item"""
307 if len(p)==2:
308 p[0] = ['case_list', p[1]]
309 else:
310 p[0] = p[1] + [p[2]]
311
312def p_case_item_ns(p):
313 """case_item_ns : pattern RPARENS linebreak
314 | pattern RPARENS compound_list linebreak
315 | LPARENS pattern RPARENS linebreak
316 | LPARENS pattern RPARENS compound_list linebreak"""
317 p_case_item(p)
318
319def p_case_item(p):
320 """case_item : pattern RPARENS linebreak DSEMI linebreak
321 | pattern RPARENS compound_list DSEMI linebreak
322 | LPARENS pattern RPARENS linebreak DSEMI linebreak
323 | LPARENS pattern RPARENS compound_list DSEMI linebreak"""
324 if len(p) < 7:
325 name = p[1][1:]
326 else:
327 name = p[2][1:]
328
329 try:
330 cmds = get_production(p[1:], "compound_list")[1:]
331 except KeyError:
332 cmds = []
333
334 p[0] = ('case_item', (name, cmds))
335
336def p_pattern(p):
337 """pattern : token
338 | pattern PIPE token"""
339 if len(p)==2:
340 p[0] = ['pattern', ('TOKEN', p[1])]
341 else:
342 p[0] = p[1] + [('TOKEN', p[2])]
343
344def p_maybe_if_word(p):
345 # Rearrange 'If' priority wrt TOKEN. See p_if_word
346 """maybe_if_word : If"""
347 p[0] = ('maybe_if_word', p[1])
348
349def p_maybe_then_word(p):
350 # Rearrange 'Then' priority wrt TOKEN. See p_then_word
351 """maybe_then_word : Then"""
352 p[0] = ('maybe_then_word', p[1])
353
354def p_if_clause(p):
355 """if_clause : if_word compound_list then_word compound_list else_part Fi
356 | if_word compound_list then_word compound_list Fi"""
357 else_part = []
358 if len(p)==7:
359 else_part = p[5]
360 p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part))
361
362def p_else_part(p):
363 """else_part : Elif compound_list then_word compound_list else_part
364 | Elif compound_list then_word compound_list
365 | Else compound_list"""
366 if len(p)==3:
367 p[0] = p[2][1:]
368 else:
369 else_part = []
370 if len(p)==6:
371 else_part = p[5]
372 p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part))
373
374def p_while_clause(p):
375 """while_clause : While compound_list do_group"""
376 p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:]))
377
378def p_maybe_until_word(p):
379 # Rearrange 'Until' priority wrt TOKEN. See p_until_word
380 """maybe_until_word : Until"""
381 p[0] = ('maybe_until_word', p[1])
382
383def p_until_clause(p):
384 """until_clause : until_word compound_list do_group"""
385 p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:]))
386
387def p_function_definition(p):
388 """function_definition : fname LPARENS RPARENS linebreak function_body"""
389 p[0] = ('function_definition', FunDef(p[1], p[5]))
390
391def p_function_body(p):
392 """function_body : compound_command
393 | compound_command redirect_list"""
394 if len(p)!=2:
395        raise NotImplementedError('function redirection lists are not implemented')
396 p[0] = p[1]
397
398def p_fname(p):
399 """fname : TOKEN""" #Was NAME instead of token
400 p[0] = p[1]
401
402def p_brace_group(p):
403 """brace_group : Lbrace compound_list Rbrace"""
404 p[0] = ('brace_group', BraceGroup(p[2][1:]))
405
406def p_maybe_done_word(p):
407 #See p_assignment_word for details.
408 """maybe_done_word : Done"""
409 p[0] = ('maybe_done_word', p[1])
410
411def p_maybe_do_word(p):
412 """maybe_do_word : Do"""
413 p[0] = ('maybe_do_word', p[1])
414
415def p_do_group(p):
416 """do_group : do_word compound_list done_word"""
417 #Do group contains a list of AndOr
418 p[0] = ['do_group'] + p[2][1:]
419
420def p_simple_command(p):
421 """simple_command : cmd_prefix cmd_word cmd_suffix
422 | cmd_prefix cmd_word
423 | cmd_prefix
424 | cmd_name cmd_suffix
425 | cmd_name"""
426 words, redirs, assigns = [], [], []
427 for e in p[1:]:
428 name = e[0]
429 if name in ('cmd_prefix', 'cmd_suffix'):
430 for sube in e[1:]:
431 subname = sube[0]
432 if subname=='io_redirect':
433 redirs.append(make_io_redirect(sube))
434 elif subname=='ASSIGNMENT_WORD':
435 assigns.append(sube)
436 else:
437 words.append(sube)
438 elif name in ('cmd_word', 'cmd_name'):
439 words.append(e)
440
441 cmd = SimpleCommand(words, redirs, assigns)
442 p[0] = ('simple_command', cmd)
443
444def p_cmd_name(p):
445 """cmd_name : TOKEN"""
446 p[0] = ('cmd_name', p[1])
447
448def p_cmd_word(p):
449 """cmd_word : token"""
450 p[0] = ('cmd_word', p[1])
451
452def p_maybe_assignment_word(p):
453 #See p_assignment_word for details.
454 """maybe_assignment_word : ASSIGNMENT_WORD"""
455 p[0] = ('maybe_assignment_word', p[1])
456
457def p_cmd_prefix(p):
458 """cmd_prefix : io_redirect
459 | cmd_prefix io_redirect
460 | assignment_word
461 | cmd_prefix assignment_word"""
462 try:
463 prefix = get_production(p[1:], 'cmd_prefix')
464 except KeyError:
465 prefix = ['cmd_prefix']
466
467 try:
468 value = get_production(p[1:], 'assignment_word')[1]
469 value = ('ASSIGNMENT_WORD', value.split('=', 1))
470 except KeyError:
471 value = get_production(p[1:], 'io_redirect')
472 p[0] = prefix + [value]
473
474def p_cmd_suffix(p):
475 """cmd_suffix : io_redirect
476 | cmd_suffix io_redirect
477 | token
478 | cmd_suffix token
479 | maybe_for_word
480 | cmd_suffix maybe_for_word
481 | maybe_done_word
482 | cmd_suffix maybe_done_word
483 | maybe_do_word
484 | cmd_suffix maybe_do_word
485 | maybe_until_word
486 | cmd_suffix maybe_until_word
487 | maybe_assignment_word
488 | cmd_suffix maybe_assignment_word
489 | maybe_if_word
490 | cmd_suffix maybe_if_word
491 | maybe_then_word
492 | cmd_suffix maybe_then_word
493 | maybe_bang_word
494 | cmd_suffix maybe_bang_word"""
495 try:
496 suffix = get_production(p[1:], 'cmd_suffix')
497 token = p[2]
498 except KeyError:
499 suffix = ['cmd_suffix']
500 token = p[1]
501
502 if isinstance(token, tuple):
503 if token[0]=='io_redirect':
504 p[0] = suffix + [token]
505 else:
506 #Convert maybe_* to TOKEN if necessary
507 p[0] = suffix + [('TOKEN', token[1])]
508 else:
509 p[0] = suffix + [('TOKEN', token)]
510
511def p_redirect_list(p):
512 """redirect_list : io_redirect
513 | redirect_list io_redirect"""
514 if len(p) == 2:
515 p[0] = ['redirect_list', make_io_redirect(p[1])]
516 else:
517 p[0] = p[1] + [make_io_redirect(p[2])]
518
519def p_io_redirect(p):
520 """io_redirect : io_file
521 | IO_NUMBER io_file
522 | io_here
523 | IO_NUMBER io_here"""
524 if len(p)==3:
525 p[0] = ('io_redirect', p[1], p[2])
526 else:
527 p[0] = ('io_redirect', None, p[1])
528
529def p_io_file(p):
530 #Return the tuple (operator, filename)
531 """io_file : LESS filename
532 | LESSAND filename
533 | GREATER filename
534 | GREATAND filename
535 | DGREAT filename
536 | LESSGREAT filename
537 | CLOBBER filename"""
538 #Extract the filename from the file
539 p[0] = ('io_file', p[1], p[2][1])
540
541def p_filename(p):
542 #Return the filename
543 """filename : TOKEN"""
544 p[0] = ('filename', p[1])
545
546def p_io_here(p):
547 """io_here : DLESS here_end
548 | DLESSDASH here_end"""
549 p[0] = ('io_here', p[1], p[2][1], p[2][2])
550
551def p_here_end(p):
552 """here_end : HERENAME TOKEN"""
553 p[0] = ('here_document', p[1], p[2])
554
555def p_newline_sequence(p):
556 # Nothing in the grammar can handle leading NEWLINE productions, so add
557    # this one with the lowest possible priority relative to newline_list.
558 """newline_sequence : newline_list"""
559 p[0] = None
560
561def p_newline_list(p):
562 """newline_list : NEWLINE
563 | newline_list NEWLINE"""
564 p[0] = None
565
566def p_linebreak(p):
567 """linebreak : newline_list
568 | empty"""
569 p[0] = None
570
571def p_separator_op(p):
572 """separator_op : COMMA
573 | AMP"""
574 p[0] = p[1]
575
576def p_separator(p):
577 """separator : separator_op linebreak
578 | newline_list"""
579 if len(p)==2:
580 #Ignore newlines
581 p[0] = None
582 else:
583 #Keep the separator operator
584 p[0] = ('separator', p[1])
585
586def p_sequential_sep(p):
587 """sequential_sep : COMMA linebreak
588 | newline_list"""
589 p[0] = None
590
591# Low priority TOKEN => for_word conversion.
592# Let maybe_for_word be used as a token when necessary in higher priority
593# rules.
594def p_for_word(p):
595 """for_word : maybe_for_word"""
596 p[0] = p[1]
597
598def p_if_word(p):
599 """if_word : maybe_if_word"""
600 p[0] = p[1]
601
602def p_then_word(p):
603 """then_word : maybe_then_word"""
604 p[0] = p[1]
605
606def p_done_word(p):
607 """done_word : maybe_done_word"""
608 p[0] = p[1]
609
610def p_do_word(p):
611 """do_word : maybe_do_word"""
612 p[0] = p[1]
613
614def p_until_word(p):
615 """until_word : maybe_until_word"""
616 p[0] = p[1]
617
618def p_assignment_word(p):
619 """assignment_word : maybe_assignment_word"""
620 p[0] = ('assignment_word', p[1][1])
621
622def p_bang_word(p):
623 """bang_word : maybe_bang_word"""
624 p[0] = ('bang_word', p[1][1])
625
626def p_token(p):
627 """token : TOKEN
628 | Fi"""
629 p[0] = p[1]
630
631def p_empty(p):
632 'empty :'
633 p[0] = None
634
635# Error rule for syntax errors
636def p_error(p):
637 msg = []
638 w = msg.append
639 w('%r\n' % p)
640 w('followed by:\n')
641 for i in range(5):
642 n = yacc.token()
643 if not n:
644 break
645 w(' %r\n' % n)
646 raise sherrors.ShellSyntaxError(''.join(msg))
647
648# Build the parser
649try:
650 import pyshtables
651except ImportError:
652 outputdir = os.path.dirname(__file__)
653 if not os.access(outputdir, os.W_OK):
654 outputdir = ''
655 yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
656else:
657 yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
658
659
660def parse(input, eof=False, debug=False):
661 """Parse a whole script at once and return the generated AST and unconsumed
662 data in a tuple.
663
664 NOTE: eof is probably meaningless for now, the parser being unable to work
665 in pull mode. It should be set to True.
666 """
667 lexer = pyshlex.PLYLexer()
668 remaining = lexer.add(input, eof)
669 if lexer.is_empty():
670 return [], remaining
671 if debug:
672 debug = 2
673 return yacc.parse(lexer=lexer, debug=debug), remaining
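# Editor's note (not part of the original patch): typical entry-point usage; the
# exact AST shape depends on the grammar above, so treat this as a sketch:
#
#   cmds, rest = parse('echo hello && ls\n', eof=True)
#   print_commands(cmds)   # pretty-print the resulting command tree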
674
675#-------------------------------------------------------------------------------
676# AST rendering helpers
677#-------------------------------------------------------------------------------
678
679def format_commands(v):
680 """Return a tree made of strings and lists. Make command trees easier to
681 display.
682 """
683 if isinstance(v, list):
684 return [format_commands(c) for c in v]
685 if isinstance(v, tuple):
686 if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str):
687 if v[0] == 'async':
688 return ['AsyncList', map(format_commands, v[1])]
689 else:
690 #Avoid decomposing tuples like ('pipeline', Pipeline(...))
691 return format_commands(v[1])
692 return format_commands(list(v))
693 elif isinstance(v, IfCond):
694 name = ['IfCond']
695 name += ['if', map(format_commands, v.cond)]
696 name += ['then', map(format_commands, v.if_cmds)]
697 name += ['else', map(format_commands, v.else_cmds)]
698 return name
699 elif isinstance(v, ForLoop):
700 name = ['ForLoop']
701 name += [repr(v.name)+' in ', map(str, v.items)]
702 name += ['commands', map(format_commands, v.cmds)]
703 return name
704 elif isinstance(v, AndOr):
705 return [v.op, format_commands(v.left), format_commands(v.right)]
706 elif isinstance(v, Pipeline):
707 name = 'Pipeline'
708 if v.reverse_status:
709 name = '!' + name
710 return [name, format_commands(v.commands)]
711 elif isinstance(v, Case):
712 name = ['Case']
713        return name + [v.name, format_commands(v.items)]
714 elif isinstance(v, SimpleCommand):
715 name = ['SimpleCommand']
716 if v.words:
717 name += ['words', map(str, v.words)]
718 if v.assigns:
719 assigns = [tuple(a[1]) for a in v.assigns]
720 name += ['assigns', map(str, assigns)]
721 if v.redirs:
722 name += ['redirs', map(format_commands, v.redirs)]
723 return name
724 elif isinstance(v, RedirectList):
725 name = ['RedirectList']
726 if v.redirs:
727 name += ['redirs', map(format_commands, v.redirs)]
728 name += ['command', format_commands(v.cmd)]
729 return name
730 elif isinstance(v, IORedirect):
731 return ' '.join(map(str, (v.io_number, v.op, v.filename)))
732 elif isinstance(v, HereDocument):
733 return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content))))
734 elif isinstance(v, SubShell):
735 return ['SubShell', map(format_commands, v.cmds)]
736 else:
737 return repr(v)
738
739def print_commands(cmds, output=sys.stdout):
740 """Pretty print a command tree."""
741 def print_tree(cmd, spaces, output):
742 if isinstance(cmd, list):
743 for c in cmd:
744 print_tree(c, spaces + 3, output)
745 else:
746 print >>output, ' '*spaces + str(cmd)
747
748 formatted = format_commands(cmds)
749 print_tree(formatted, 0, output)
750
751
752def stringify_commands(cmds):
753 """Serialize a command tree as a string.
754
755 Returned string is not pretty and is currently used for unit tests only.
756 """
757 def stringify(value):
758 output = []
759 if isinstance(value, list):
760 formatted = []
761 for v in value:
762 formatted.append(stringify(v))
763 formatted = ' '.join(formatted)
764 output.append(''.join(['<', formatted, '>']))
765 else:
766 output.append(value)
767 return ' '.join(output)
768
769 return stringify(format_commands(cmds))
770
771
772def visit_commands(cmds, callable):
773 """Visit the command tree and execute callable on every Pipeline and
774 SimpleCommand instances.
775 """
776 if isinstance(cmds, (tuple, list)):
777 map(lambda c: visit_commands(c,callable), cmds)
778 elif isinstance(cmds, (Pipeline, SimpleCommand)):
779 callable(cmds)
diff --git a/bitbake/lib/bb/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py
new file mode 100644
index 0000000000..1d5bd53b3a
--- /dev/null
+++ b/bitbake/lib/bb/pysh/sherrors.py
@@ -0,0 +1,41 @@
1# sherrors.py - shell errors and signals
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Define shell exceptions and error codes.
9"""
10
11class ShellError(Exception):
12 pass
13
14class ShellSyntaxError(ShellError):
15 pass
16
17class UtilityError(ShellError):
18 """Raised upon utility syntax error (option or operand error)."""
19 pass
20
21class ExpansionError(ShellError):
22 pass
23
24class CommandNotFound(ShellError):
25 """Specified command was not found."""
26 pass
27
28class RedirectionError(ShellError):
29 pass
30
31class VarAssignmentError(ShellError):
32 """Variable assignment error."""
33 pass
34
35class ExitSignal(ShellError):
36 """Exit signal."""
37 pass
38
39class ReturnSignal(ShellError):
40    """Return signal."""
41 pass \ No newline at end of file
diff --git a/bitbake/lib/bb/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py
new file mode 100644
index 0000000000..46eca22802
--- /dev/null
+++ b/bitbake/lib/bb/pysh/subprocess_fix.py
@@ -0,0 +1,77 @@
1# subprocess - Subprocesses with accessible I/O streams
2#
3# For more information about this module, see PEP 324.
4#
5# This module should remain compatible with Python 2.2, see PEP 291.
6#
7# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
8#
9# Licensed to PSF under a Contributor Agreement.
10# See http://www.python.org/2.4/license for licensing details.
11
12def list2cmdline(seq):
13 """
14 Translate a sequence of arguments into a command line
15 string, using the same rules as the MS C runtime:
16
17 1) Arguments are delimited by white space, which is either a
18 space or a tab.
19
20 2) A string surrounded by double quotation marks is
21 interpreted as a single argument, regardless of white space
22 contained within. A quoted string can be embedded in an
23 argument.
24
25 3) A double quotation mark preceded by a backslash is
26 interpreted as a literal double quotation mark.
27
28 4) Backslashes are interpreted literally, unless they
29 immediately precede a double quotation mark.
30
31 5) If backslashes immediately precede a double quotation mark,
32 every pair of backslashes is interpreted as a literal
33 backslash. If the number of backslashes is odd, the last
34 backslash escapes the next double quotation mark as
35 described in rule 3.
36 """
37
38 # See
39 # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
40 result = []
41 needquote = False
42 for arg in seq:
43 bs_buf = []
44
45 # Add a space to separate this argument from the others
46 if result:
47 result.append(' ')
48
49 needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
50 if needquote:
51 result.append('"')
52
53 for c in arg:
54 if c == '\\':
55 # Don't know if we need to double yet.
56 bs_buf.append(c)
57 elif c == '"':
58 # Double backspaces.
59 result.append('\\' * len(bs_buf)*2)
60 bs_buf = []
61 result.append('\\"')
62 else:
63 # Normal char
64 if bs_buf:
65 result.extend(bs_buf)
66 bs_buf = []
67 result.append(c)
68
69 # Add remaining backspaces, if any.
70 if bs_buf:
71 result.extend(bs_buf)
72
73 if needquote:
74 result.extend(bs_buf)
75 result.append('"')
76
77 return ''.join(result)
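
As a quick, hand-checked illustration of the quoting rules described in the docstring above (doctest-style, assuming list2cmdline is imported from this module):

    >>> list2cmdline(['echo', 'hello world'])
    'echo "hello world"'
    >>> list2cmdline(['cmd', 'a"b'])
    'cmd a\\"b'
    >>> list2cmdline(['cmd', ''])
    'cmd ""'

Arguments containing a space, a tab, a '|' or the empty string are wrapped in double quotes; embedded double quotes are backslash-escaped.
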
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
new file mode 100644
index 0000000000..6372b65fd9
--- /dev/null
+++ b/bitbake/lib/bb/runqueue.py
@@ -0,0 +1,2154 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'RunQueue' implementation
6
7Handles preparation and execution of a queue of tasks
8"""
9
10# Copyright (C) 2006-2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import copy
26import os
27import sys
28import signal
29import stat
30import fcntl
31import errno
32import logging
33import re
34import bb
35from bb import msg, data, event
36from bb import monitordisk
37import subprocess
38
39try:
40 import cPickle as pickle
41except ImportError:
42 import pickle
43
44bblogger = logging.getLogger("BitBake")
45logger = logging.getLogger("BitBake.RunQueue")
46
47__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
48
49class RunQueueStats:
50 """
51 Holds statistics on the tasks handled by the associated runQueue
52 """
53 def __init__(self, total):
54 self.completed = 0
55 self.skipped = 0
56 self.failed = 0
57 self.active = 0
58 self.total = total
59
60 def copy(self):
61 obj = self.__class__(self.total)
62 obj.__dict__.update(self.__dict__)
63 return obj
64
65 def taskFailed(self):
66 self.active = self.active - 1
67 self.failed = self.failed + 1
68
69 def taskCompleted(self, number = 1):
70 self.active = self.active - number
71 self.completed = self.completed + number
72
73 def taskSkipped(self, number = 1):
74 self.active = self.active + number
75 self.skipped = self.skipped + number
76
77 def taskActive(self):
78 self.active = self.active + 1
79
80# These values indicate the next step due to be run in the
81# runQueue state machine
82runQueuePrepare = 2
83runQueueSceneInit = 3
84runQueueSceneRun = 4
85runQueueRunInit = 5
86runQueueRunning = 6
87runQueueFailed = 7
88runQueueCleanUp = 8
89runQueueComplete = 9
90
91class RunQueueScheduler(object):
92 """
93 Control the order tasks are scheduled in.
94 """
95 name = "basic"
96
97 def __init__(self, runqueue, rqdata):
98 """
99 The default scheduler just returns the first buildable task (the
100 priority map is sorted by task number)
101 """
102 self.rq = runqueue
103 self.rqdata = rqdata
104 self.numTasks = len(self.rqdata.runq_fnid)
105
106 self.prio_map = []
107 self.prio_map.extend(range(self.numTasks))
108
109 self.buildable = []
110 self.stamps = {}
111 for taskid in xrange(self.numTasks):
112 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]]
113 taskname = self.rqdata.runq_task[taskid]
114 self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
115 if self.rq.runq_buildable[taskid] == 1:
116 self.buildable.append(taskid)
117
118 self.rev_prio_map = None
119
120 def next_buildable_task(self):
121 """
122 Return the id of the first task we find that is buildable
123 """
124 self.buildable = [x for x in self.buildable if not self.rq.runq_running[x] == 1]
125 if not self.buildable:
126 return None
127 if len(self.buildable) == 1:
128 taskid = self.buildable[0]
129 stamp = self.stamps[taskid]
130 if stamp not in self.rq.build_stamps.itervalues():
131 return taskid
132
133 if not self.rev_prio_map:
134 self.rev_prio_map = range(self.numTasks)
135 for taskid in xrange(self.numTasks):
136 self.rev_prio_map[self.prio_map[taskid]] = taskid
137
138 best = None
139 bestprio = None
140 for taskid in self.buildable:
141 prio = self.rev_prio_map[taskid]
142 if not bestprio or bestprio > prio:
143 stamp = self.stamps[taskid]
144 if stamp in self.rq.build_stamps.itervalues():
145 continue
146 bestprio = prio
147 best = taskid
148
149 return best
150
151 def next(self):
152 """
153 Return the id of the task we should build next
154 """
155 if self.rq.stats.active < self.rq.number_tasks:
156 return self.next_buildable_task()
157
158 def newbuilable(self, task):
159 self.buildable.append(task)
160
161class RunQueueSchedulerSpeed(RunQueueScheduler):
162 """
163 A scheduler optimised for speed. The priority map is sorted by task weight;
164 heavier weighted tasks (tasks needed by the most other tasks) are run first.
165 """
166 name = "speed"
167
168 def __init__(self, runqueue, rqdata):
169 """
170 The priority map is sorted by task weight.
171 """
172 RunQueueScheduler.__init__(self, runqueue, rqdata)
173
174 sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
175 copyweight = copy.deepcopy(self.rqdata.runq_weight)
176 self.prio_map = []
177
178 for weight in sortweight:
179 idx = copyweight.index(weight)
180 self.prio_map.append(idx)
181 copyweight[idx] = -1
182
183 self.prio_map.reverse()
184
185class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
186 """
187 A scheduler optimised to complete .bb files as quickly as possible. The
188 priority map is sorted by task weight, but then reordered so that once a given
189 .bb file starts to build, it's completed as quickly as possible. This works
190 well where disk space is at a premium and classes like OE's rm_work are in
191 force.
192 """
193 name = "completion"
194
195 def __init__(self, runqueue, rqdata):
196 RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
197
198 #FIXME - whilst this groups all fnids together it does not reorder the
199 #fnid groups optimally.
200
201 basemap = copy.deepcopy(self.prio_map)
202 self.prio_map = []
203 while (len(basemap) > 0):
204 entry = basemap.pop(0)
205 self.prio_map.append(entry)
206 fnid = self.rqdata.runq_fnid[entry]
207 todel = []
208 for entry in basemap:
209 entry_fnid = self.rqdata.runq_fnid[entry]
210 if entry_fnid == fnid:
211 todel.append(basemap.index(entry))
212 self.prio_map.append(entry)
213 todel.reverse()
214 for idx in todel:
215 del basemap[idx]
216
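
To make the reordering above concrete, here is a small self-contained sketch (illustrative only, with made-up task numbers and fnids) of the grouping the completion scheduler performs: after the speed sort, all tasks sharing an fnid (i.e. coming from the same .bb file) are pulled up next to the first task of that file.

    # Illustrative only: group a speed-sorted priority map so that tasks
    # sharing an fnid become adjacent, preserving the order in which each
    # file is first encountered.
    def group_by_fnid(prio_map, runq_fnid):
        grouped = []
        remaining = list(prio_map)
        while remaining:
            entry = remaining.pop(0)
            grouped.append(entry)
            fnid = runq_fnid[entry]
            grouped.extend([t for t in remaining if runq_fnid[t] == fnid])
            remaining = [t for t in remaining if runq_fnid[t] != fnid]
        return grouped

    # tasks 0-5; runq_fnid[t] gives the file each task belongs to
    print(group_by_fnid([3, 0, 4, 1, 5, 2], [0, 1, 0, 1, 2, 2]))
    # -> [3, 1, 0, 2, 4, 5]
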
217class RunQueueData:
218 """
219 BitBake Run Queue implementation
220 """
221 def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
222 self.cooker = cooker
223 self.dataCache = dataCache
224 self.taskData = taskData
225 self.targets = targets
226 self.rq = rq
227 self.warn_multi_bb = False
228
229 self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
230 self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
231
232 self.reset()
233
234 def reset(self):
235 self.runq_fnid = []
236 self.runq_task = []
237 self.runq_depends = []
238 self.runq_revdeps = []
239 self.runq_hash = []
240
241 def runq_depends_names(self, ids):
242 import re
243 ret = []
244 for id in self.runq_depends[ids]:
245 nam = os.path.basename(self.get_user_idstring(id))
246 nam = re.sub("_[^,]*,", ",", nam)
247 ret.extend([nam])
248 return ret
249
250 def get_task_name(self, task):
251 return self.runq_task[task]
252
253 def get_task_file(self, task):
254 return self.taskData.fn_index[self.runq_fnid[task]]
255
256 def get_task_hash(self, task):
257 return self.runq_hash[task]
258
259 def get_user_idstring(self, task, task_name_suffix = ""):
260 fn = self.taskData.fn_index[self.runq_fnid[task]]
261 taskname = self.runq_task[task] + task_name_suffix
262 return "%s, %s" % (fn, taskname)
263
264 def get_task_id(self, fnid, taskname):
265 for listid in xrange(len(self.runq_fnid)):
266 if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
267 return listid
268 return None
269
270 def circular_depchains_handler(self, tasks):
271 """
272 Some tasks aren't buildable, likely due to circular dependency issues.
273 Identify the circular dependencies and print them in a user readable format.
274 """
275 from copy import deepcopy
276
277 valid_chains = []
278 explored_deps = {}
279 msgs = []
280
281 def chain_reorder(chain):
282 """
283 Reorder a dependency chain so the lowest task id is first
284 """
285 lowest = 0
286 new_chain = []
287 for entry in xrange(len(chain)):
288 if chain[entry] < chain[lowest]:
289 lowest = entry
290 new_chain.extend(chain[lowest:])
291 new_chain.extend(chain[:lowest])
292 return new_chain
293
294 def chain_compare_equal(chain1, chain2):
295 """
296 Compare two dependency chains and see if they're the same
297 """
298 if len(chain1) != len(chain2):
299 return False
300 for index in xrange(len(chain1)):
301 if chain1[index] != chain2[index]:
302 return False
303 return True
304
305 def chain_array_contains(chain, chain_array):
306 """
307 Return True if chain_array contains chain
308 """
309 for ch in chain_array:
310 if chain_compare_equal(ch, chain):
311 return True
312 return False
313
314 def find_chains(taskid, prev_chain):
315 prev_chain.append(taskid)
316 total_deps = []
317 total_deps.extend(self.runq_revdeps[taskid])
318 for revdep in self.runq_revdeps[taskid]:
319 if revdep in prev_chain:
320 idx = prev_chain.index(revdep)
321 # To prevent duplicates, reorder the chain to start with the lowest taskid
322 # and search through an array of those we've already printed
323 chain = prev_chain[idx:]
324 new_chain = chain_reorder(chain)
325 if not chain_array_contains(new_chain, valid_chains):
326 valid_chains.append(new_chain)
327 msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
328 for dep in new_chain:
329 msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
330 msgs.append("\n")
331 if len(valid_chains) > 10:
332 msgs.append("Aborted dependency loops search after 10 matches.\n")
333 return msgs
334 continue
335 scan = False
336 if revdep not in explored_deps:
337 scan = True
338 elif revdep in explored_deps[revdep]:
339 scan = True
340 else:
341 for dep in prev_chain:
342 if dep in explored_deps[revdep]:
343 scan = True
344 if scan:
345 find_chains(revdep, copy.deepcopy(prev_chain))
346 for dep in explored_deps[revdep]:
347 if dep not in total_deps:
348 total_deps.append(dep)
349
350 explored_deps[taskid] = total_deps
351
352 for task in tasks:
353 find_chains(task, [])
354
355 return msgs
356
357 def calculate_task_weights(self, endpoints):
358 """
359 Calculate a number representing the "weight" of each task. Heavier weighted tasks
360 have more dependencies and hence should be executed sooner for maximum speed.
361
362 This function also sanity checks the task list, finding tasks that are not
363 possible to execute due to circular dependencies.
364 """
365
366 numTasks = len(self.runq_fnid)
367 weight = []
368 deps_left = []
369 task_done = []
370
371 for listid in xrange(numTasks):
372 task_done.append(False)
373 weight.append(0)
374 deps_left.append(len(self.runq_revdeps[listid]))
375
376 for listid in endpoints:
377 weight[listid] = 1
378 task_done[listid] = True
379
380 while True:
381 next_points = []
382 for listid in endpoints:
383 for revdep in self.runq_depends[listid]:
384 weight[revdep] = weight[revdep] + weight[listid]
385 deps_left[revdep] = deps_left[revdep] - 1
386 if deps_left[revdep] == 0:
387 next_points.append(revdep)
388 task_done[revdep] = True
389 endpoints = next_points
390 if len(next_points) == 0:
391 break
392
393 # Circular dependency sanity check
394 problem_tasks = []
395 for task in xrange(numTasks):
396 if task_done[task] is False or deps_left[task] != 0:
397 problem_tasks.append(task)
398 logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
399 logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
400
401 if problem_tasks:
402 message = "Unbuildable tasks were found.\n"
403 message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
404 message = message + "Identifying dependency loops (this may take a short while)...\n"
405 logger.error(message)
406
407 msgs = self.circular_depchains_handler(problem_tasks)
408
409 message = "\n"
410 for msg in msgs:
411 message = message + msg
412 bb.msg.fatal("RunQueue", message)
413
414 return weight
415
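
A hedged illustration of the weighting pass above (not taken from the patch, just the same propagation on a toy graph using names instead of task ids): with a diamond where B and C depend on A and D depends on both, weights flow from the endpoint D back towards its dependencies, leaving A heaviest so the speed scheduler starts it first.

    # Illustrative only: mirror calculate_task_weights() on a tiny diamond.
    runq_depends = {"A": set(), "B": set(["A"]), "C": set(["A"]), "D": set(["B", "C"])}
    runq_revdeps = {"A": set(["B", "C"]), "B": set(["D"]), "C": set(["D"]), "D": set()}

    weight = dict((t, 0) for t in runq_depends)
    deps_left = dict((t, len(runq_revdeps[t])) for t in runq_revdeps)
    endpoints = [t for t in runq_revdeps if not runq_revdeps[t]]  # nothing depends on these
    for t in endpoints:
        weight[t] = 1
    while endpoints:
        next_points = []
        for t in endpoints:
            for dep in runq_depends[t]:
                weight[dep] += weight[t]
                deps_left[dep] -= 1
                if deps_left[dep] == 0:
                    next_points.append(dep)
        endpoints = next_points

    print(weight)  # A ends up with weight 2; B, C and D with weight 1
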
416 def prepare(self):
417 """
418 Turn a set of taskData into a RunQueue and compute data needed
419 to optimise the execution order.
420 """
421
422 runq_build = []
423 recursivetasks = {}
424 recursiveitasks = {}
425 recursivetasksselfref = set()
426
427 taskData = self.taskData
428
429 if len(taskData.tasks_name) == 0:
430 # Nothing to do
431 return 0
432
433 logger.info("Preparing runqueue")
434
435 # Step A - Work out a list of tasks to run
436 #
437 # Taskdata gives us a list of possible providers for every build and run
438 # target ordered by priority. It also gives information on each of those
439 # providers.
440 #
441 # To create the actual list of tasks to execute we fix the list of
442 # providers and then resolve the dependencies into task IDs. This
443 # process is repeated for each type of dependency (tdepends, deptask,
444 # rdeptask, recrdeptask, idepends).
445
446 def add_build_dependencies(depids, tasknames, depends):
447 for depid in depids:
448 # Won't be in build_targets if ASSUME_PROVIDED
449 if depid not in taskData.build_targets:
450 continue
451 depdata = taskData.build_targets[depid][0]
452 if depdata is None:
453 continue
454 for taskname in tasknames:
455 taskid = taskData.gettask_id_fromfnid(depdata, taskname)
456 if taskid is not None:
457 depends.add(taskid)
458
459 def add_runtime_dependencies(depids, tasknames, depends):
460 for depid in depids:
461 if depid not in taskData.run_targets:
462 continue
463 depdata = taskData.run_targets[depid][0]
464 if depdata is None:
465 continue
466 for taskname in tasknames:
467 taskid = taskData.gettask_id_fromfnid(depdata, taskname)
468 if taskid is not None:
469 depends.add(taskid)
470
471 def add_resolved_dependencies(depids, tasknames, depends):
472 for depid in depids:
473 for taskname in tasknames:
474 taskid = taskData.gettask_id_fromfnid(depid, taskname)
475 if taskid is not None:
476 depends.add(taskid)
477
478 for task in xrange(len(taskData.tasks_name)):
479 depends = set()
480 fnid = taskData.tasks_fnid[task]
481 fn = taskData.fn_index[fnid]
482 task_deps = self.dataCache.task_deps[fn]
483
484 #logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
485
486 if fnid not in taskData.failed_fnids:
487
488 # Resolve task internal dependencies
489 #
490 # e.g. addtask before X after Y
491 depends = set(taskData.tasks_tdepends[task])
492
493 # Resolve 'deptask' dependencies
494 #
495 # e.g. do_sometask[deptask] = "do_someothertask"
496 # (makes sure sometask runs after someothertask of all DEPENDS)
497 if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
498 tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
499 add_build_dependencies(taskData.depids[fnid], tasknames, depends)
500
501 # Resolve 'rdeptask' dependencies
502 #
503 # e.g. do_sometask[rdeptask] = "do_someothertask"
504 # (makes sure sometask runs after someothertask of all RDEPENDS)
505 if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
506 tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
507 add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
508
509 # Resolve inter-task dependencies
510 #
511 # e.g. do_sometask[depends] = "targetname:do_someothertask"
512 # (makes sure sometask runs after targetname's someothertask)
513 idepends = taskData.tasks_idepends[task]
514 for (depid, idependtask) in idepends:
515 if depid in taskData.build_targets and not depid in taskData.failed_deps:
516 # Won't be in build_targets if ASSUME_PROVIDED
517 depdata = taskData.build_targets[depid][0]
518 if depdata is not None:
519 taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
520 if taskid is None:
521 bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
522 depends.add(taskid)
523 irdepends = taskData.tasks_irdepends[task]
524 for (depid, idependtask) in irdepends:
525 if depid in taskData.run_targets:
526 # Won't be in run_targets if ASSUME_PROVIDED
527 depdata = taskData.run_targets[depid][0]
528 if depdata is not None:
529 taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
530 if taskid is None:
531 bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
532 depends.add(taskid)
533
534 # Resolve recursive 'recrdeptask' dependencies (Part A)
535 #
536 # e.g. do_sometask[recrdeptask] = "do_someothertask"
537 # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
538 # We cover the recursive part of the dependencies below
539 if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
540 tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
541 recursivetasks[task] = tasknames
542 add_build_dependencies(taskData.depids[fnid], tasknames, depends)
543 add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
544 if taskData.tasks_name[task] in tasknames:
545 recursivetasksselfref.add(task)
546
547 if 'recideptask' in task_deps and taskData.tasks_name[task] in task_deps['recideptask']:
548 recursiveitasks[task] = []
549 for t in task_deps['recideptask'][taskData.tasks_name[task]].split():
550 newdep = taskData.gettask_id_fromfnid(fnid, t)
551 recursiveitasks[task].append(newdep)
552
553 self.runq_fnid.append(taskData.tasks_fnid[task])
554 self.runq_task.append(taskData.tasks_name[task])
555 self.runq_depends.append(depends)
556 self.runq_revdeps.append(set())
557 self.runq_hash.append("")
558
559 runq_build.append(0)
560
561 # Resolve recursive 'recrdeptask' dependencies (Part B)
562 #
563 # e.g. do_sometask[recrdeptask] = "do_someothertask"
564 # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
565 # We need to do this separately since we need all of self.runq_depends to be complete before this is processed
566 extradeps = {}
567 for task in recursivetasks:
568 extradeps[task] = set(self.runq_depends[task])
569 tasknames = recursivetasks[task]
570 seendeps = set()
571 seenfnid = []
572
573 def generate_recdeps(t):
574 newdeps = set()
575 add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
576 extradeps[task].update(newdeps)
577 seendeps.add(t)
578 newdeps.add(t)
579 for i in newdeps:
580 for n in self.runq_depends[i]:
581 if n not in seendeps:
582 generate_recdeps(n)
583 generate_recdeps(task)
584
585 if task in recursiveitasks:
586 for dep in recursiveitasks[task]:
587 generate_recdeps(dep)
588
589 # Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
590 for task in recursivetasks:
591 extradeps[task].difference_update(recursivetasksselfref)
592
593 for task in xrange(len(taskData.tasks_name)):
594 # Add in extra dependencies
595 if task in extradeps:
596 self.runq_depends[task] = extradeps[task]
597 # Remove all self references
598 if task in self.runq_depends[task]:
599 logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
600 self.runq_depends[task].remove(task)
601
602 # Step B - Mark all active tasks
603 #
604 # Start with the tasks we were asked to run and mark all dependencies
605 # as active too. If the task is to be 'forced', clear its stamp. Once
606 # all active tasks are marked, prune the ones we don't need.
607
608 logger.verbose("Marking Active Tasks")
609
610 def mark_active(listid, depth):
611 """
612 Mark an item as active along with its depends
613 (calls itself recursively)
614 """
615
616 if runq_build[listid] == 1:
617 return
618
619 runq_build[listid] = 1
620
621 depends = self.runq_depends[listid]
622 for depend in depends:
623 mark_active(depend, depth+1)
624
625 self.target_pairs = []
626 for target in self.targets:
627 targetid = taskData.getbuild_id(target[0])
628
629 if targetid not in taskData.build_targets:
630 continue
631
632 if targetid in taskData.failed_deps:
633 continue
634
635 fnid = taskData.build_targets[targetid][0]
636 fn = taskData.fn_index[fnid]
637 self.target_pairs.append((fn, target[1]))
638
639 if fnid in taskData.failed_fnids:
640 continue
641
642 if target[1] not in taskData.tasks_lookup[fnid]:
643 import difflib
644 close_matches = difflib.get_close_matches(target[1], taskData.tasks_lookup[fnid], cutoff=0.7)
645 if close_matches:
646 extra = ". Close matches:\n %s" % "\n ".join(close_matches)
647 else:
648 extra = ""
649 bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (target[1], target[0], extra))
650
651 listid = taskData.tasks_lookup[fnid][target[1]]
652
653 mark_active(listid, 1)
654
655 # Step C - Prune all inactive tasks
656 #
657 # Once all active tasks are marked, prune the ones we don't need.
658
659 maps = []
660 delcount = 0
661 for listid in xrange(len(self.runq_fnid)):
662 if runq_build[listid-delcount] == 1:
663 maps.append(listid-delcount)
664 else:
665 del self.runq_fnid[listid-delcount]
666 del self.runq_task[listid-delcount]
667 del self.runq_depends[listid-delcount]
668 del runq_build[listid-delcount]
669 del self.runq_revdeps[listid-delcount]
670 del self.runq_hash[listid-delcount]
671 delcount = delcount + 1
672 maps.append(-1)
673
674 #
675 # Step D - Sanity checks and computation
676 #
677
678 # Check to make sure we still have tasks to run
679 if len(self.runq_fnid) == 0:
680 if not taskData.abort:
681 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
682 else:
683 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
684
685 logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
686
687 # Remap the dependencies to account for the deleted tasks
688 # Check we didn't delete a task we depend on
689 for listid in xrange(len(self.runq_fnid)):
690 newdeps = []
691 origdeps = self.runq_depends[listid]
692 for origdep in origdeps:
693 if maps[origdep] == -1:
694 bb.msg.fatal("RunQueue", "Invalid mapping - Should never happen!")
695 newdeps.append(maps[origdep])
696 self.runq_depends[listid] = set(newdeps)
697
698 logger.verbose("Assign Weightings")
699
700 # Generate a list of reverse dependencies to ease future calculations
701 for listid in xrange(len(self.runq_fnid)):
702 for dep in self.runq_depends[listid]:
703 self.runq_revdeps[dep].add(listid)
704
705 # Identify tasks at the end of dependency chains
706 # Error on circular dependency loops (length two)
707 endpoints = []
708 for listid in xrange(len(self.runq_fnid)):
709 revdeps = self.runq_revdeps[listid]
710 if len(revdeps) == 0:
711 endpoints.append(listid)
712 for dep in revdeps:
713 if dep in self.runq_depends[listid]:
714 #self.dump_data(taskData)
715 bb.msg.fatal("RunQueue", "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
716
717 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
718
719 # Calculate task weights
720 # Check for higher length circular dependencies
721 self.runq_weight = self.calculate_task_weights(endpoints)
722
723 # Sanity Check - Check for multiple tasks building the same provider
724 prov_list = {}
725 seen_fn = []
726 for task in xrange(len(self.runq_fnid)):
727 fn = taskData.fn_index[self.runq_fnid[task]]
728 if fn in seen_fn:
729 continue
730 seen_fn.append(fn)
731 for prov in self.dataCache.fn_provides[fn]:
732 if prov not in prov_list:
733 prov_list[prov] = [fn]
734 elif fn not in prov_list[prov]:
735 prov_list[prov].append(fn)
736 for prov in prov_list:
737 if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
738 seen_pn = []
739 # If two versions of the same PN are being built it's fatal, we don't support it.
740 for fn in prov_list[prov]:
741 pn = self.dataCache.pkg_fn[fn]
742 if pn not in seen_pn:
743 seen_pn.append(pn)
744 else:
745 bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
746 msg = "Multiple .bb files are due to be built which each provide %s (%s)." % (prov, " ".join(prov_list[prov]))
747 if self.warn_multi_bb:
748 logger.warn(msg)
749 else:
750 msg += "\n This usually means one provides something the other doesn't and should."
751 logger.error(msg)
752
753 # Create a whitelist usable by the stamp checks
754 stampfnwhitelist = []
755 for entry in self.stampwhitelist.split():
756 entryid = self.taskData.getbuild_id(entry)
757 if entryid not in self.taskData.build_targets:
758 continue
759 fnid = self.taskData.build_targets[entryid][0]
760 fn = self.taskData.fn_index[fnid]
761 stampfnwhitelist.append(fn)
762 self.stampfnwhitelist = stampfnwhitelist
763
764 # Iterate over the task list looking for tasks with a 'setscene' function
765 self.runq_setscene = []
766 if not self.cooker.configuration.nosetscene:
767 for task in range(len(self.runq_fnid)):
768 setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
769 if not setscene:
770 continue
771 self.runq_setscene.append(task)
772
773 def invalidate_task(fn, taskname, error_nostamp):
774 taskdep = self.dataCache.task_deps[fn]
775 fnid = self.taskData.getfn_id(fn)
776 if taskname not in taskData.tasks_lookup[fnid]:
777 logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname)
778 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
779 if error_nostamp:
780 bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
781 else:
782 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
783 else:
784 logger.verbose("Invalidate task %s, %s", taskname, fn)
785 bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)
786
787 # Invalidate task if force mode active
788 if self.cooker.configuration.force:
789 for (fn, target) in self.target_pairs:
790 invalidate_task(fn, target, False)
791
792 # Invalidate task if invalidate mode active
793 if self.cooker.configuration.invalidate_stamp:
794 for (fn, target) in self.target_pairs:
795 for st in self.cooker.configuration.invalidate_stamp.split(','):
796 invalidate_task(fn, "do_%s" % st, True)
797
798 # Iterate over the task list and call into the siggen code
799 dealtwith = set()
800 todeal = set(range(len(self.runq_fnid)))
801 while len(todeal) > 0:
802 for task in todeal.copy():
803 if len(self.runq_depends[task] - dealtwith) == 0:
804 dealtwith.add(task)
805 todeal.remove(task)
806 procdep = []
807 for dep in self.runq_depends[task]:
808 procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
809 self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
810
811 return len(self.runq_fnid)
812
813 def dump_data(self, taskQueue):
814 """
815 Dump some debug information on the internal data structures
816 """
817 logger.debug(3, "run_tasks:")
818 for task in xrange(len(self.rqdata.runq_task)):
819 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
820 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
821 self.rqdata.runq_task[task],
822 self.rqdata.runq_weight[task],
823 self.rqdata.runq_depends[task],
824 self.rqdata.runq_revdeps[task])
825
826 logger.debug(3, "sorted_tasks:")
827 for task1 in xrange(len(self.rqdata.runq_task)):
828 if task1 in self.prio_map:
829 task = self.prio_map[task1]
830 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
831 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
832 self.rqdata.runq_task[task],
833 self.rqdata.runq_weight[task],
834 self.rqdata.runq_depends[task],
835 self.rqdata.runq_revdeps[task])
836
837class RunQueue:
838 def __init__(self, cooker, cfgData, dataCache, taskData, targets):
839
840 self.cooker = cooker
841 self.cfgData = cfgData
842 self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
843
844 self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
845 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
846 self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
847 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None
848
849 self.state = runQueuePrepare
850
851 # For disk space monitor
852 self.dm = monitordisk.diskMonitor(cfgData)
853
854 self.rqexe = None
855 self.worker = None
856 self.workerpipe = None
857 self.fakeworker = None
858 self.fakeworkerpipe = None
859
860 def _start_worker(self, fakeroot = False, rqexec = None):
861 logger.debug(1, "Starting bitbake-worker")
862 if fakeroot:
863 fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
864 fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
865 env = os.environ.copy()
866 for key, value in (var.split('=') for var in fakerootenv):
867 env[key] = value
868 worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
869 else:
870 worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
871 bb.utils.nonblockingfd(worker.stdout)
872 workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
873
874 workerdata = {
875 "taskdeps" : self.rqdata.dataCache.task_deps,
876 "fakerootenv" : self.rqdata.dataCache.fakerootenv,
877 "fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
878 "fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
879 "hashes" : bb.parse.siggen.taskhash,
880 "hash_deps" : bb.parse.siggen.runtaskdeps,
881 "sigchecksums" : bb.parse.siggen.file_checksum_values,
882 "runq_hash" : self.rqdata.runq_hash,
883 "logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
884 "logdefaultverbose" : bb.msg.loggerDefaultVerbose,
885 "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
886 "logdefaultdomain" : bb.msg.loggerDefaultDomains,
887 "prhost" : self.cooker.prhost,
888 "buildname" : self.cfgData.getVar("BUILDNAME", True),
889 "date" : self.cfgData.getVar("DATE", True),
890 "time" : self.cfgData.getVar("TIME", True),
891 }
892
893 worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>")
894 worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>")
895 worker.stdin.flush()
896
897 return worker, workerpipe
898
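
The two writes above frame pickled payloads between XML-like tags on the worker's stdin; the real decoding lives in bitbake-worker and runQueuePipe, so the helper below is only an illustrative sketch of that framing, with extract_frame being a made-up name.

    import pickle

    def extract_frame(buf, tag):
        # Illustrative only: pull one "<tag>payload</tag>" frame out of a
        # buffered string, returning (unpickled payload, remaining buffer),
        # or (None, buf) if no complete frame has arrived yet.
        start, end = "<%s>" % tag, "</%s>" % tag
        s = buf.find(start)
        e = buf.find(end)
        if s == -1 or e == -1:
            return None, buf
        payload = buf[s + len(start):e]
        return pickle.loads(payload), buf[e + len(end):]
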
899 def _teardown_worker(self, worker, workerpipe):
900 if not worker:
901 return
902 logger.debug(1, "Teardown for bitbake-worker")
903 try:
904 worker.stdin.write("<quit></quit>")
905 worker.stdin.flush()
906 except IOError:
907 pass
908 while worker.returncode is None:
909 workerpipe.read()
910 worker.poll()
911 while workerpipe.read():
912 continue
913 workerpipe.close()
914
915 def start_worker(self):
916 if self.worker:
917 self.teardown_workers()
918 self.teardown = False
919 self.worker, self.workerpipe = self._start_worker()
920
921 def start_fakeworker(self, rqexec):
922 if not self.fakeworker:
923 self.fakeworker, self.fakeworkerpipe = self._start_worker(True, rqexec)
924
925 def teardown_workers(self):
926 self.teardown = True
927 self._teardown_worker(self.worker, self.workerpipe)
928 self.worker = None
929 self.workerpipe = None
930 self._teardown_worker(self.fakeworker, self.fakeworkerpipe)
931 self.fakeworker = None
932 self.fakeworkerpipe = None
933
934 def read_workers(self):
935 self.workerpipe.read()
936 if self.fakeworkerpipe:
937 self.fakeworkerpipe.read()
938
939 def active_fds(self):
940 fds = []
941 if self.workerpipe:
942 fds.append(self.workerpipe.input)
943 if self.fakeworkerpipe:
944 fds.append(self.fakeworkerpipe.input)
945 return fds
946
947 def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
948 def get_timestamp(f):
949 try:
950 if not os.access(f, os.F_OK):
951 return None
952 return os.stat(f)[stat.ST_MTIME]
953 except:
954 return None
955
956 if self.stamppolicy == "perfile":
957 fulldeptree = False
958 else:
959 fulldeptree = True
960 stampwhitelist = []
961 if self.stamppolicy == "whitelist":
962 stampwhitelist = self.rqdata.stampfnwhitelist
963
964 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
965 if taskname is None:
966 taskname = self.rqdata.runq_task[task]
967
968 stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
969
970 # If the stamp is missing, it's not current
971 if not os.access(stampfile, os.F_OK):
972 logger.debug(2, "Stampfile %s not available", stampfile)
973 return False
974 # If it's a 'nostamp' task, it's not current
975 taskdep = self.rqdata.dataCache.task_deps[fn]
976 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
977 logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
978 return False
979
980 if taskname != "do_setscene" and taskname.endswith("_setscene"):
981 return True
982
983 if cache is None:
984 cache = {}
985
986 iscurrent = True
987 t1 = get_timestamp(stampfile)
988 for dep in self.rqdata.runq_depends[task]:
989 if iscurrent:
990 fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
991 taskname2 = self.rqdata.runq_task[dep]
992 stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
993 stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
994 t2 = get_timestamp(stampfile2)
995 t3 = get_timestamp(stampfile3)
996 if t3 and t3 > t2:
997 continue
998 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
999 if not t2:
1000 logger.debug(2, 'Stampfile %s does not exist', stampfile2)
1001 iscurrent = False
1002 if t1 < t2:
1003 logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
1004 iscurrent = False
1005 if recurse and iscurrent:
1006 if dep in cache:
1007 iscurrent = cache[dep]
1008 if not iscurrent:
1009 logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
1010 else:
1011 iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
1012 cache[dep] = iscurrent
1013 if recurse:
1014 cache[task] = iscurrent
1015 return iscurrent
1016
1017 def _execute_runqueue(self):
1018 """
1019 Run the tasks in a queue prepared by rqdata.prepare()
1020 Upon failure, optionally try to recover the build using any alternate providers
1021 (if the abort on failure configuration option isn't set)
1022 """
1023
1024 retval = True
1025
1026 if self.state is runQueuePrepare:
1027 self.rqexe = RunQueueExecuteDummy(self)
1028 if self.rqdata.prepare() == 0:
1029 self.state = runQueueComplete
1030 else:
1031 self.state = runQueueSceneInit
1032
1033 # we are ready to run, see if any UI client needs the dependency info
1034 if bb.cooker.CookerFeatures.SEND_DEPENDS_TREE in self.cooker.featureset:
1035 depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
1036 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
1037
1038 if self.state is runQueueSceneInit:
1039 dump = self.cooker.configuration.dump_signatures
1040 if dump:
1041 if 'printdiff' in dump:
1042 invalidtasks = self.print_diffscenetasks()
1043 self.dump_signatures(dump)
1044 if 'printdiff' in dump:
1045 self.write_diffscenetasks(invalidtasks)
1046 self.state = runQueueComplete
1047 else:
1048 self.start_worker()
1049 self.rqexe = RunQueueExecuteScenequeue(self)
1050
1051 if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
1052 self.dm.check(self)
1053
1054 if self.state is runQueueSceneRun:
1055 retval = self.rqexe.execute()
1056
1057 if self.state is runQueueRunInit:
1058 logger.info("Executing RunQueue Tasks")
1059 self.rqexe = RunQueueExecuteTasks(self)
1060 self.state = runQueueRunning
1061
1062 if self.state is runQueueRunning:
1063 retval = self.rqexe.execute()
1064
1065 if self.state is runQueueCleanUp:
1066 self.rqexe.finish()
1067
1068 if self.state is runQueueComplete or self.state is runQueueFailed:
1069 self.teardown_workers()
1070 if self.rqexe.stats.failed:
1071 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
1072 else:
1073 # Let's avoid the word "failed" if nothing actually did
1074 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)
1075
1076 if self.state is runQueueFailed:
1077 if not self.rqdata.taskData.tryaltconfigs:
1078 raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
1079 for fnid in self.rqexe.failed_fnids:
1080 self.rqdata.taskData.fail_fnid(fnid)
1081 self.rqdata.reset()
1082
1083 if self.state is runQueueComplete:
1084 # All done
1085 return False
1086
1087 # Loop
1088 return retval
1089
1090 def execute_runqueue(self):
1091 # Catch unexpected exceptions and ensure we exit when an error occurs, not loop.
1092 try:
1093 return self._execute_runqueue()
1094 except bb.runqueue.TaskFailure:
1095 raise
1096 except SystemExit:
1097 raise
1098 except:
1099 logger.error("An uncaught exception occurred in runqueue, please see the failure below:")
1100 try:
1101 self.teardown_workers()
1102 except:
1103 pass
1104 self.state = runQueueComplete
1105 raise
1106
1107 def finish_runqueue(self, now = False):
1108 if not self.rqexe:
1109 return
1110
1111 if now:
1112 self.rqexe.finish_now()
1113 else:
1114 self.rqexe.finish()
1115
1116 def dump_signatures(self, options):
1117 done = set()
1118 bb.note("Reparsing files to collect dependency data")
1119 for task in range(len(self.rqdata.runq_fnid)):
1120 if self.rqdata.runq_fnid[task] not in done:
1121 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1122 the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn), self.cooker.data)
1123 done.add(self.rqdata.runq_fnid[task])
1124
1125 bb.parse.siggen.dump_sigs(self.rqdata.dataCache, options)
1126
1127 return
1128
1129 def print_diffscenetasks(self):
1130
1131 valid = []
1132 sq_hash = []
1133 sq_hashfn = []
1134 sq_fn = []
1135 sq_taskname = []
1136 sq_task = []
1137 noexec = []
1138 stamppresent = []
1139 valid_new = set()
1140
1141 for task in xrange(len(self.rqdata.runq_fnid)):
1142 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1143 taskname = self.rqdata.runq_task[task]
1144 taskdep = self.rqdata.dataCache.task_deps[fn]
1145
1146 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1147 noexec.append(task)
1148 continue
1149
1150 sq_fn.append(fn)
1151 sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
1152 sq_hash.append(self.rqdata.runq_hash[task])
1153 sq_taskname.append(taskname)
1154 sq_task.append(task)
1155 call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
1156 locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
1157 valid = bb.utils.better_eval(call, locs)
1158 for v in valid:
1159 valid_new.add(sq_task[v])
1160
1161 # Tasks which are both setscene and noexec never care about dependencies
1162 # We therefore find tasks which are setscene and noexec and mark their
1163 # unique dependencies as valid.
1164 for task in noexec:
1165 if task not in self.rqdata.runq_setscene:
1166 continue
1167 for dep in self.rqdata.runq_depends[task]:
1168 hasnoexecparents = True
1169 for dep2 in self.rqdata.runq_revdeps[dep]:
1170 if dep2 in self.rqdata.runq_setscene and dep2 in noexec:
1171 continue
1172 hasnoexecparents = False
1173 break
1174 if hasnoexecparents:
1175 valid_new.add(dep)
1176
1177 invalidtasks = set()
1178 for task in xrange(len(self.rqdata.runq_fnid)):
1179 if task not in valid_new and task not in noexec:
1180 invalidtasks.add(task)
1181
1182 found = set()
1183 processed = set()
1184 for task in invalidtasks:
1185 toprocess = set([task])
1186 while toprocess:
1187 next = set()
1188 for t in toprocess:
1189 for dep in self.rqdata.runq_depends[t]:
1190 if dep in invalidtasks:
1191 found.add(task)
1192 if dep not in processed:
1193 processed.add(dep)
1194 next.add(dep)
1195 toprocess = next
1196 if task in found:
1197 toprocess = set()
1198
1199 tasklist = []
1200 for task in invalidtasks.difference(found):
1201 tasklist.append(self.rqdata.get_user_idstring(task))
1202
1203 if tasklist:
1204 bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
1205
1206 return invalidtasks.difference(found)
1207
1208 def write_diffscenetasks(self, invalidtasks):
1209
1210 # Define recursion callback
1211 def recursecb(key, hash1, hash2):
1212 hashes = [hash1, hash2]
1213 hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
1214
1215 recout = []
1216 if len(hashfiles) == 2:
1217 out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
1218 recout.extend(list(' ' + l for l in out2))
1219 else:
1220 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
1221
1222 return recout
1223
1224
1225 for task in invalidtasks:
1226 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1227 pn = self.rqdata.dataCache.pkg_fn[fn]
1228 taskname = self.rqdata.runq_task[task]
1229 h = self.rqdata.runq_hash[task]
1230 matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
1231 match = None
1232 for m in matches:
1233 if h in m:
1234 match = m
1235 if match is None:
1236 bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
1237 matches = {k : v for k, v in matches.iteritems() if h not in k}
1238 if matches:
1239 latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
1240 prevh = __find_md5__.search(latestmatch).group(0)
1241 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
1242 bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
1243
1244class RunQueueExecute:
1245
1246 def __init__(self, rq):
1247 self.rq = rq
1248 self.cooker = rq.cooker
1249 self.cfgData = rq.cfgData
1250 self.rqdata = rq.rqdata
1251
1252 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
1253 self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
1254
1255 self.runq_buildable = []
1256 self.runq_running = []
1257 self.runq_complete = []
1258
1259 self.build_stamps = {}
1260 self.build_stamps2 = []
1261 self.failed_fnids = []
1262
1263 self.stampcache = {}
1264
1265 rq.workerpipe.setrunqueueexec(self)
1266 if rq.fakeworkerpipe:
1267 rq.fakeworkerpipe.setrunqueueexec(self)
1268
1269 def runqueue_process_waitpid(self, task, status):
1270
1271 # self.build_stamps[pid] may not exist when using a shared work directory.
1272 if task in self.build_stamps:
1273 self.build_stamps2.remove(self.build_stamps[task])
1274 del self.build_stamps[task]
1275
1276 if status != 0:
1277 self.task_fail(task, status)
1278 else:
1279 self.task_complete(task)
1280 return True
1281
1282 def finish_now(self):
1283
1284 for worker in [self.rq.worker, self.rq.fakeworker]:
1285 if not worker:
1286 continue
1287 try:
1288 worker.stdin.write("<finishnow></finishnow>")
1289 worker.stdin.flush()
1290 except IOError:
1291 # worker must have died?
1292 pass
1293
1294 if len(self.failed_fnids) != 0:
1295 self.rq.state = runQueueFailed
1296 return
1297
1298 self.rq.state = runQueueComplete
1299 return
1300
1301 def finish(self):
1302 self.rq.state = runQueueCleanUp
1303
1304 if self.stats.active > 0:
1305 bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
1306 self.rq.read_workers()
1307
1308 return
1309
1310 if len(self.failed_fnids) != 0:
1311 self.rq.state = runQueueFailed
1312 return
1313
1314 self.rq.state = runQueueComplete
1315 return
1316
1317 def check_dependencies(self, task, taskdeps, setscene = False):
1318 if not self.rq.depvalidate:
1319 return False
1320
1321 taskdata = {}
1322 taskdeps.add(task)
1323 for dep in taskdeps:
1324 if setscene:
1325 depid = self.rqdata.runq_setscene[dep]
1326 else:
1327 depid = dep
1328 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[depid]]
1329 pn = self.rqdata.dataCache.pkg_fn[fn]
1330 taskname = self.rqdata.runq_task[depid]
1331 taskdata[dep] = [pn, taskname, fn]
1332 call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
1333 locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
1334 valid = bb.utils.better_eval(call, locs)
1335 return valid
1336
1337class RunQueueExecuteDummy(RunQueueExecute):
1338 def __init__(self, rq):
1339 self.rq = rq
1340 self.stats = RunQueueStats(0)
1341
1342 def finish(self):
1343 self.rq.state = runQueueComplete
1344 return
1345
1346class RunQueueExecuteTasks(RunQueueExecute):
1347 def __init__(self, rq):
1348 RunQueueExecute.__init__(self, rq)
1349
1350 self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
1351
1352 self.stampcache = {}
1353
1354 initial_covered = self.rq.scenequeue_covered.copy()
1355
1356 # Mark initial buildable tasks
1357 for task in xrange(self.stats.total):
1358 self.runq_running.append(0)
1359 self.runq_complete.append(0)
1360 if len(self.rqdata.runq_depends[task]) == 0:
1361 self.runq_buildable.append(1)
1362 else:
1363 self.runq_buildable.append(0)
1364 if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
1365 self.rq.scenequeue_covered.add(task)
1366
1367 found = True
1368 while found:
1369 found = False
1370 for task in xrange(self.stats.total):
1371 if task in self.rq.scenequeue_covered:
1372 continue
1373 logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))
1374
1375 if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
1376 found = True
1377 self.rq.scenequeue_covered.add(task)
1378
1379 logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))
1380
1381 # Allow the metadata to elect for setscene tasks to run anyway
1382 covered_remove = set()
1383 if self.rq.setsceneverify:
1384 invalidtasks = []
1385 for task in xrange(len(self.rqdata.runq_task)):
1386 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1387 taskname = self.rqdata.runq_task[task]
1388 taskdep = self.rqdata.dataCache.task_deps[fn]
1389
1390 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1391 continue
1392 if self.rq.check_stamp_task(task, taskname + "_setscene", cache=self.stampcache):
1393 logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
1394 continue
1395 if self.rq.check_stamp_task(task, taskname, recurse = True, cache=self.stampcache):
1396 logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
1397 continue
1398 invalidtasks.append(task)
1399
1400 call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
1401 call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
1402 locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
1403 # Backwards compatibility with older versions without invalidtasks
1404 try:
1405 covered_remove = bb.utils.better_eval(call, locs)
1406 except TypeError:
1407 covered_remove = bb.utils.better_eval(call2, locs)
1408
1409 def removecoveredtask(task):
1410 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1411 taskname = self.rqdata.runq_task[task] + '_setscene'
1412 bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
1413 self.rq.scenequeue_covered.remove(task)
1414
1415 toremove = covered_remove
1416 for task in toremove:
1417 logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
1418 while toremove:
1419 covered_remove = []
1420 for task in toremove:
1421 removecoveredtask(task)
1422 for deptask in self.rqdata.runq_depends[task]:
1423 if deptask not in self.rq.scenequeue_covered:
1424 continue
1425 if deptask in toremove or deptask in covered_remove or deptask in initial_covered:
1426 continue
1427 logger.debug(1, 'Task %s depends on task %s so not skipping' % (task, deptask))
1428 covered_remove.append(deptask)
1429 toremove = covered_remove
1430
1431 logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
1432
1433 event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
1434
1435 schedulers = self.get_schedulers()
1436 for scheduler in schedulers:
1437 if self.scheduler == scheduler.name:
1438 self.sched = scheduler(self, self.rqdata)
1439 logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
1440 break
1441 else:
1442 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
1443 (self.scheduler, ", ".join(obj.name for obj in schedulers)))
1444
1445 def get_schedulers(self):
1446 schedulers = set(obj for obj in globals().values()
1447 if type(obj) is type and
1448 issubclass(obj, RunQueueScheduler))
1449
1450 user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
1451 if user_schedulers:
1452 for sched in user_schedulers.split():
1453 if not "." in sched:
1454 bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
1455 continue
1456
1457 modname, name = sched.rsplit(".", 1)
1458 try:
1459 module = __import__(modname, fromlist=(name,))
1460 except ImportError as exc:
1461 logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
1462 raise SystemExit(1)
1463 else:
1464 schedulers.add(getattr(module, name))
1465 return schedulers
1466
1467 def setbuildable(self, task):
1468 self.runq_buildable[task] = 1
1469 self.sched.newbuilable(task)
1470
1471 def task_completeoutright(self, task):
1472 """
1473 Mark a task as completed
1474 Look at the reverse dependencies and mark any task with
1475 completed dependencies as buildable
1476 """
1477 self.runq_complete[task] = 1
1478 for revdep in self.rqdata.runq_revdeps[task]:
1479 if self.runq_running[revdep] == 1:
1480 continue
1481 if self.runq_buildable[revdep] == 1:
1482 continue
1483 alldeps = 1
1484 for dep in self.rqdata.runq_depends[revdep]:
1485 if self.runq_complete[dep] != 1:
1486 alldeps = 0
1487 if alldeps == 1:
1488 self.setbuildable(revdep)
1489 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
1490 taskname = self.rqdata.runq_task[revdep]
1491 logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
1492
1493 def task_complete(self, task):
1494 self.stats.taskCompleted()
1495 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1496 self.task_completeoutright(task)
1497
1498 def task_fail(self, task, exitcode):
1499 """
1500 Called when a task has failed
1501 Updates the state engine with the failure
1502 """
1503 self.stats.taskFailed()
1504 fnid = self.rqdata.runq_fnid[task]
1505 self.failed_fnids.append(fnid)
1506 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
1507 if self.rqdata.taskData.abort:
1508 self.rq.state = runQueueCleanUp
1509
1510 def task_skip(self, task, reason):
1511 self.runq_running[task] = 1
1512 self.setbuildable(task)
1513 bb.event.fire(runQueueTaskSkipped(task, self.stats, self.rq, reason), self.cfgData)
1514 self.task_completeoutright(task)
1515 self.stats.taskCompleted()
1516 self.stats.taskSkipped()
1517
1518 def execute(self):
1519 """
1520 Run the tasks in a queue prepared by rqdata.prepare()
1521 """
1522
1523 self.rq.read_workers()
1524
1525
1526 if self.stats.total == 0:
1527 # nothing to do
1528 self.rq.state = runQueueCleanUp
1529
1530 task = self.sched.next()
1531 if task is not None:
1532 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1533 taskname = self.rqdata.runq_task[task]
1534
1535 if task in self.rq.scenequeue_covered:
1536 logger.debug(2, "Setscene covered task %s (%s)", task,
1537 self.rqdata.get_user_idstring(task))
1538 self.task_skip(task, "covered")
1539 return True
1540
1541 if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
1542 logger.debug(2, "Stamp current task %s (%s)", task,
1543 self.rqdata.get_user_idstring(task))
1544 self.task_skip(task, "existing")
1545 return True
1546
1547 taskdep = self.rqdata.dataCache.task_deps[fn]
1548 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1549 startevent = runQueueTaskStarted(task, self.stats, self.rq,
1550 noexec=True)
1551 bb.event.fire(startevent, self.cfgData)
1552 self.runq_running[task] = 1
1553 self.stats.taskActive()
1554 bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
1555 self.task_complete(task)
1556 return True
1557 else:
1558 startevent = runQueueTaskStarted(task, self.stats, self.rq)
1559 bb.event.fire(startevent, self.cfgData)
1560
1561 taskdepdata = self.build_taskdepdata(task)
1562
1563 taskdep = self.rqdata.dataCache.task_deps[fn]
1564 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
1565 if not self.rq.fakeworker:
1566 self.rq.start_fakeworker(self)
1567 self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
1568 self.rq.fakeworker.stdin.flush()
1569 else:
1570 self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
1571 self.rq.worker.stdin.flush()
1572
1573 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
1574 self.build_stamps2.append(self.build_stamps[task])
1575 self.runq_running[task] = 1
1576 self.stats.taskActive()
1577 if self.stats.active < self.number_tasks:
1578 return True
1579
1580 if self.stats.active > 0:
1581 self.rq.read_workers()
1582 return self.rq.active_fds()
1583
1584 if len(self.failed_fnids) != 0:
1585 self.rq.state = runQueueFailed
1586 return True
1587
1588 # Sanity Checks
1589 for task in xrange(self.stats.total):
1590 if self.runq_buildable[task] == 0:
1591 logger.error("Task %s never buildable!", task)
1592 if self.runq_running[task] == 0:
1593 logger.error("Task %s never ran!", task)
1594 if self.runq_complete[task] == 0:
1595 logger.error("Task %s never completed!", task)
1596 self.rq.state = runQueueComplete
1597
1598 return True
1599
1600 def build_taskdepdata(self, task):
1601 taskdepdata = {}
1602 next = self.rqdata.runq_depends[task]
1603 next.add(task)
1604 while next:
1605 additional = []
1606 for revdep in next:
1607 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
1608 pn = self.rqdata.dataCache.pkg_fn[fn]
1609 taskname = self.rqdata.runq_task[revdep]
1610 deps = self.rqdata.runq_depends[revdep]
1611 taskdepdata[revdep] = [pn, taskname, fn, deps]
1612 for revdep2 in deps:
1613 if revdep2 not in taskdepdata:
1614 additional.append(revdep2)
1615 next = additional
1616
1617 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
1618 return taskdepdata
1619
1620class RunQueueExecuteScenequeue(RunQueueExecute):
1621 def __init__(self, rq):
1622 RunQueueExecute.__init__(self, rq)
1623
1624 self.scenequeue_covered = set()
1625 self.scenequeue_notcovered = set()
1626 self.scenequeue_notneeded = set()
1627
1628 # If we don't have any setscene functions, skip this step
1629 if len(self.rqdata.runq_setscene) == 0:
1630 rq.scenequeue_covered = set()
1631 rq.state = runQueueRunInit
1632 return
1633
1634 self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
1635
1636 sq_revdeps = []
1637 sq_revdeps_new = []
1638 sq_revdeps_squash = []
1639 self.sq_harddeps = {}
1640
1641 # We need to construct a dependency graph for the setscene functions. Intermediate
1642 # dependencies between the setscene tasks only complicate the code. This code
1643 # therefore aims to collapse the huge runqueue dependency tree into a smaller one
1644 # only containing the setscene functions.
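        # Illustrative example (editorial note, not part of the original patch):
        # given a chain do_fetch -> do_unpack -> do_populate_sysroot(_setscene)
        # -> do_build, only the setscene-capable task survives in the collapsed
        # graph, with its dependencies re-expressed against other setscene tasks.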
1645
1646 for task in xrange(self.stats.total):
1647 self.runq_running.append(0)
1648 self.runq_complete.append(0)
1649 self.runq_buildable.append(0)
1650
1651 # First process the chains up to the first setscene task.
1652 endpoints = {}
1653 for task in xrange(len(self.rqdata.runq_fnid)):
1654 sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
1655 sq_revdeps_new.append(set())
1656 if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
1657 endpoints[task] = set()
1658
1659 # Secondly process the chains between setscene tasks.
1660 for task in self.rqdata.runq_setscene:
1661 for dep in self.rqdata.runq_depends[task]:
1662 if dep not in endpoints:
1663 endpoints[dep] = set()
1664 endpoints[dep].add(task)
1665
1666 def process_endpoints(endpoints):
1667 newendpoints = {}
1668 for point, task in endpoints.items():
1669 tasks = set()
1670 if task:
1671 tasks |= task
1672 if sq_revdeps_new[point]:
1673 tasks |= sq_revdeps_new[point]
1674 sq_revdeps_new[point] = set()
1675 if point in self.rqdata.runq_setscene:
1676 sq_revdeps_new[point] = tasks
1677 for dep in self.rqdata.runq_depends[point]:
1678 if point in sq_revdeps[dep]:
1679 sq_revdeps[dep].remove(point)
1680 if tasks:
1681 sq_revdeps_new[dep] |= tasks
1682 if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
1683 newendpoints[dep] = task
1684 if len(newendpoints) != 0:
1685 process_endpoints(newendpoints)
1686
1687 process_endpoints(endpoints)
1688
1689        # Build a list of setscene tasks which are "unskippable"
1690 # These are direct endpoints referenced by the build
1691 endpoints2 = {}
1692 sq_revdeps2 = []
1693 sq_revdeps_new2 = []
1694 def process_endpoints2(endpoints):
1695 newendpoints = {}
1696 for point, task in endpoints.items():
1697 tasks = set([point])
1698 if task:
1699 tasks |= task
1700 if sq_revdeps_new2[point]:
1701 tasks |= sq_revdeps_new2[point]
1702 sq_revdeps_new2[point] = set()
1703 if point in self.rqdata.runq_setscene:
1704 sq_revdeps_new2[point] = tasks
1705 for dep in self.rqdata.runq_depends[point]:
1706 if point in sq_revdeps2[dep]:
1707 sq_revdeps2[dep].remove(point)
1708 if tasks:
1709 sq_revdeps_new2[dep] |= tasks
1710 if (len(sq_revdeps2[dep]) == 0 or len(sq_revdeps_new2[dep]) != 0) and dep not in self.rqdata.runq_setscene:
1711 newendpoints[dep] = tasks
1712 if len(newendpoints) != 0:
1713 process_endpoints2(newendpoints)
1714 for task in xrange(len(self.rqdata.runq_fnid)):
1715 sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task]))
1716 sq_revdeps_new2.append(set())
1717 if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
1718 endpoints2[task] = set()
1719 process_endpoints2(endpoints2)
1720 self.unskippable = []
1721 for task in self.rqdata.runq_setscene:
1722 if sq_revdeps_new2[task]:
1723 self.unskippable.append(self.rqdata.runq_setscene.index(task))
1724
1725 for task in xrange(len(self.rqdata.runq_fnid)):
1726 if task in self.rqdata.runq_setscene:
1727 deps = set()
1728 for dep in sq_revdeps_new[task]:
1729 deps.add(self.rqdata.runq_setscene.index(dep))
1730 sq_revdeps_squash.append(deps)
1731 elif len(sq_revdeps_new[task]) != 0:
1732 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
1733
1734 # Resolve setscene inter-task dependencies
1735 # e.g. do_sometask_setscene[depends] = "targetname:do_someothertask_setscene"
1736 # Note that anything explicitly depended upon will have its reverse dependencies removed to avoid circular dependencies
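        # The resulting sq_harddeps mapping is consulted in
        # scenequeue_updatecounters(): when a hard dependency's setscene task is
        # unavailable, its dependents are left unbuildable rather than attempted.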
1737 for task in self.rqdata.runq_setscene:
1738 realid = self.rqdata.taskData.gettask_id(self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]], self.rqdata.runq_task[task] + "_setscene", False)
1739 idepends = self.rqdata.taskData.tasks_idepends[realid]
1740 for (depid, idependtask) in idepends:
1741 if depid not in self.rqdata.taskData.build_targets:
1742 continue
1743
1744 depdata = self.rqdata.taskData.build_targets[depid][0]
1745 if depdata is None:
1746 continue
1747 dep = self.rqdata.taskData.fn_index[depdata]
1748 taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
1749 if taskid is None:
1750 bb.msg.fatal("RunQueue", "Task %s_setscene depends upon non-existent task %s:%s" % (self.rqdata.get_user_idstring(task), dep, idependtask))
1751
1752            if self.rqdata.runq_setscene.index(taskid) not in self.sq_harddeps:
1753 self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)] = set()
1754 self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)].add(self.rqdata.runq_setscene.index(task))
1755
1756 sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
1757 # Have to zero this to avoid circular dependencies
1758 sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()
1759
1760 for task in self.sq_harddeps:
1761 for dep in self.sq_harddeps[task]:
1762 sq_revdeps_squash[dep].add(task)
1763
1764 #for task in xrange(len(sq_revdeps_squash)):
1765 # realtask = self.rqdata.runq_setscene[task]
1766 # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task]))
1767
1768 self.sq_deps = []
1769 self.sq_revdeps = sq_revdeps_squash
1770 self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
1771
1772 for task in xrange(len(self.sq_revdeps)):
1773 self.sq_deps.append(set())
1774 for task in xrange(len(self.sq_revdeps)):
1775 for dep in self.sq_revdeps[task]:
1776 self.sq_deps[dep].add(task)
1777
1778 for task in xrange(len(self.sq_revdeps)):
1779 if len(self.sq_revdeps[task]) == 0:
1780 self.runq_buildable[task] = 1
1781
1782 self.outrightfail = []
1783 if self.rq.hashvalidate:
1784 sq_hash = []
1785 sq_hashfn = []
1786 sq_fn = []
1787 sq_taskname = []
1788 sq_task = []
1789 noexec = []
1790 stamppresent = []
1791 for task in xrange(len(self.sq_revdeps)):
1792 realtask = self.rqdata.runq_setscene[task]
1793 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1794 taskname = self.rqdata.runq_task[realtask]
1795 taskdep = self.rqdata.dataCache.task_deps[fn]
1796
1797 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1798 noexec.append(task)
1799 self.task_skip(task)
1800 bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
1801 continue
1802
1803 if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
1804 logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
1805 stamppresent.append(task)
1806 self.task_skip(task)
1807 continue
1808
1809 if self.rq.check_stamp_task(realtask, taskname, recurse = True, cache=self.stampcache):
1810 logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
1811 stamppresent.append(task)
1812 self.task_skip(task)
1813 continue
1814
1815 sq_fn.append(fn)
1816 sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
1817 sq_hash.append(self.rqdata.runq_hash[realtask])
1818 sq_taskname.append(taskname)
1819 sq_task.append(task)
1820 call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
1821 locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
1822 valid = bb.utils.better_eval(call, locs)
1823
1824 valid_new = stamppresent
1825 for v in valid:
1826 valid_new.append(sq_task[v])
1827
1828 for task in xrange(len(self.sq_revdeps)):
1829 if task not in valid_new and task not in noexec:
1830 realtask = self.rqdata.runq_setscene[task]
1831 logger.debug(2, 'No package found, so skipping setscene task %s',
1832 self.rqdata.get_user_idstring(realtask))
1833 self.outrightfail.append(task)
1834
1835 logger.info('Executing SetScene Tasks')
1836
1837 self.rq.state = runQueueSceneRun
1838
1839 def scenequeue_updatecounters(self, task, fail = False):
1840 for dep in self.sq_deps[task]:
1841 if fail and task in self.sq_harddeps and dep in self.sq_harddeps[task]:
1842 realtask = self.rqdata.runq_setscene[task]
1843 realdep = self.rqdata.runq_setscene[dep]
1844 logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
1845 continue
1846 self.sq_revdeps2[dep].remove(task)
1847 if len(self.sq_revdeps2[dep]) == 0:
1848 self.runq_buildable[dep] = 1
1849
1850 def task_completeoutright(self, task):
1851 """
1852 Mark a task as completed
1853 Look at the reverse dependencies and mark any task with
1854 completed dependencies as buildable
1855 """
1856
1857 index = self.rqdata.runq_setscene[task]
1858 logger.debug(1, 'Found task %s which could be accelerated',
1859 self.rqdata.get_user_idstring(index))
1860
1861 self.scenequeue_covered.add(task)
1862 self.scenequeue_updatecounters(task)
1863
1864 def task_complete(self, task):
1865 self.stats.taskCompleted()
1866 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1867 self.task_completeoutright(task)
1868
1869 def task_fail(self, task, result):
1870 self.stats.taskFailed()
1871 bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
1872 self.scenequeue_notcovered.add(task)
1873 self.scenequeue_updatecounters(task, True)
1874
1875 def task_failoutright(self, task):
1876 self.runq_running[task] = 1
1877 self.runq_buildable[task] = 1
1878 self.stats.taskCompleted()
1879 self.stats.taskSkipped()
1880 index = self.rqdata.runq_setscene[task]
1881 self.scenequeue_notcovered.add(task)
1882 self.scenequeue_updatecounters(task, True)
1883
1884 def task_skip(self, task):
1885 self.runq_running[task] = 1
1886 self.runq_buildable[task] = 1
1887 self.task_completeoutright(task)
1888 self.stats.taskCompleted()
1889 self.stats.taskSkipped()
1890
1891 def execute(self):
1892 """
1893 Run the tasks in a queue prepared by prepare_runqueue
1894 """
1895
1896 self.rq.read_workers()
1897
1898 task = None
1899 if self.stats.active < self.number_tasks:
1900 # Find the next setscene to run
1901 for nexttask in xrange(self.stats.total):
1902 if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
1903 if nexttask in self.unskippable:
1904 logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
1905 if nexttask not in self.unskippable and len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sq_revdeps[nexttask], True):
1906 realtask = self.rqdata.runq_setscene[nexttask]
1907 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1908 foundtarget = False
1909 for target in self.rqdata.target_pairs:
1910 if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
1911 foundtarget = True
1912 break
1913 if not foundtarget:
1914 logger.debug(2, "Skipping setscene for task %s" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
1915 self.task_skip(nexttask)
1916 self.scenequeue_notneeded.add(nexttask)
1917 return True
1918 if nexttask in self.outrightfail:
1919 self.task_failoutright(nexttask)
1920 return True
1921 task = nexttask
1922 break
1923 if task is not None:
1924 realtask = self.rqdata.runq_setscene[task]
1925 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1926
1927 taskname = self.rqdata.runq_task[realtask] + "_setscene"
1928 if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
1929 logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
1930 task, self.rqdata.get_user_idstring(realtask))
1931 self.task_failoutright(task)
1932 return True
1933
1934 if self.cooker.configuration.force:
1935 for target in self.rqdata.target_pairs:
1936 if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
1937 self.task_failoutright(task)
1938 return True
1939
1940 if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
1941 logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
1942 task, self.rqdata.get_user_idstring(realtask))
1943 self.task_skip(task)
1944 return True
1945
1946 startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
1947 bb.event.fire(startevent, self.cfgData)
1948
1949 taskdep = self.rqdata.dataCache.task_deps[fn]
1950 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
1951 if not self.rq.fakeworker:
1952 self.rq.start_fakeworker(self)
1953 self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
1954 self.rq.fakeworker.stdin.flush()
1955 else:
1956 self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
1957 self.rq.worker.stdin.flush()
1958
1959 self.runq_running[task] = 1
1960 self.stats.taskActive()
1961 if self.stats.active < self.number_tasks:
1962 return True
1963
1964 if self.stats.active > 0:
1965 self.rq.read_workers()
1966 return self.rq.active_fds()
1967
1968 #for task in xrange(self.stats.total):
1969 # if self.runq_running[task] != 1:
1970 # buildable = self.runq_buildable[task]
1971 # revdeps = self.sq_revdeps[task]
1972 # bb.warn("Found we didn't run %s %s %s %s" % (task, buildable, str(revdeps), self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
1973
1974 # Convert scenequeue_covered task numbers into full taskgraph ids
1975 oldcovered = self.scenequeue_covered
1976 self.rq.scenequeue_covered = set()
1977 for task in oldcovered:
1978 self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
1979 self.rq.scenequeue_notcovered = set()
1980 for task in self.scenequeue_notcovered:
1981 self.rq.scenequeue_notcovered.add(self.rqdata.runq_setscene[task])
1982
1983 logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))
1984
1985 self.rq.state = runQueueRunInit
1986 return True
1987
1988 def runqueue_process_waitpid(self, task, status):
1989 task = self.rq.rqdata.runq_setscene.index(task)
1990
1991 RunQueueExecute.runqueue_process_waitpid(self, task, status)
1992
1993class TaskFailure(Exception):
1994 """
1995 Exception raised when a task in a runqueue fails
1996 """
1997 def __init__(self, x):
1998 self.args = x
1999
2000
2001class runQueueExitWait(bb.event.Event):
2002 """
2003 Event when waiting for task processes to exit
2004 """
2005
2006 def __init__(self, remain):
2007 self.remain = remain
2008 self.message = "Waiting for %s active tasks to finish" % remain
2009 bb.event.Event.__init__(self)
2010
2011class runQueueEvent(bb.event.Event):
2012 """
2013 Base runQueue event class
2014 """
2015 def __init__(self, task, stats, rq):
2016 self.taskid = task
2017 self.taskstring = rq.rqdata.get_user_idstring(task)
2018 self.taskname = rq.rqdata.get_task_name(task)
2019 self.taskfile = rq.rqdata.get_task_file(task)
2020 self.taskhash = rq.rqdata.get_task_hash(task)
2021 self.stats = stats.copy()
2022 bb.event.Event.__init__(self)
2023
2024class sceneQueueEvent(runQueueEvent):
2025 """
2026 Base sceneQueue event class
2027 """
2028 def __init__(self, task, stats, rq, noexec=False):
2029 runQueueEvent.__init__(self, task, stats, rq)
2030 realtask = rq.rqdata.runq_setscene[task]
2031 self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")
2032 self.taskname = rq.rqdata.get_task_name(realtask) + "_setscene"
2033 self.taskfile = rq.rqdata.get_task_file(realtask)
2034 self.taskhash = rq.rqdata.get_task_hash(realtask)
2035
2036class runQueueTaskStarted(runQueueEvent):
2037 """
2038    Event notifying a task was started
2039 """
2040 def __init__(self, task, stats, rq, noexec=False):
2041 runQueueEvent.__init__(self, task, stats, rq)
2042 self.noexec = noexec
2043
2044class sceneQueueTaskStarted(sceneQueueEvent):
2045 """
2046    Event notifying a setscene task was started
2047 """
2048 def __init__(self, task, stats, rq, noexec=False):
2049 sceneQueueEvent.__init__(self, task, stats, rq)
2050 self.noexec = noexec
2051
2052class runQueueTaskFailed(runQueueEvent):
2053 """
2054    Event notifying a task failed
2055 """
2056 def __init__(self, task, stats, exitcode, rq):
2057 runQueueEvent.__init__(self, task, stats, rq)
2058 self.exitcode = exitcode
2059
2060class sceneQueueTaskFailed(sceneQueueEvent):
2061 """
2062    Event notifying a setscene task failed
2063 """
2064 def __init__(self, task, stats, exitcode, rq):
2065 sceneQueueEvent.__init__(self, task, stats, rq)
2066 self.exitcode = exitcode
2067
2068class runQueueTaskCompleted(runQueueEvent):
2069 """
2070    Event notifying a task completed
2071 """
2072
2073class sceneQueueTaskCompleted(sceneQueueEvent):
2074 """
2075    Event notifying a setscene task completed
2076 """
2077
2078class runQueueTaskSkipped(runQueueEvent):
2079 """
2080    Event notifying a task was skipped
2081 """
2082 def __init__(self, task, stats, rq, reason):
2083 runQueueEvent.__init__(self, task, stats, rq)
2084 self.reason = reason
2085
2086class runQueuePipe():
2087 """
2088 Abstraction for a pipe between a worker thread and the server
2089 """
2090 def __init__(self, pipein, pipeout, d, rq, rqexec):
2091 self.input = pipein
2092 if pipeout:
2093 pipeout.close()
2094 bb.utils.nonblockingfd(self.input)
2095 self.queue = ""
2096 self.d = d
2097 self.rq = rq
2098 self.rqexec = rqexec
2099
2100 def setrunqueueexec(self, rqexec):
2101 self.rqexec = rqexec
2102
2103 def read(self):
2104 for w in [self.rq.worker, self.rq.fakeworker]:
2105 if not w:
2106 continue
2107 w.poll()
2108 if w.returncode is not None and not self.rq.teardown:
2109 name = None
2110 if self.rq.worker and w.pid == self.rq.worker.pid:
2111 name = "Worker"
2112 elif self.rq.fakeworker and w.pid == self.rq.fakeworker.pid:
2113 name = "Fakeroot"
2114 bb.error("%s process (%s) exited unexpectedly (%s), shutting down..." % (name, w.pid, str(w.returncode)))
2115 self.rq.finish_runqueue(True)
2116
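        # Data from the worker arrives as a byte stream of framed, pickled
        # messages: "<event>...</event>" carries an event object and
        # "<exitcode>...</exitcode>" carries a (task, status) pair. Accumulate
        # the raw data and peel complete frames off the front of the queue.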
2117 start = len(self.queue)
2118 try:
2119 self.queue = self.queue + self.input.read(102400)
2120 except (OSError, IOError) as e:
2121 if e.errno != errno.EAGAIN:
2122 raise
2123 end = len(self.queue)
2124 found = True
2125 while found and len(self.queue):
2126 found = False
2127 index = self.queue.find("</event>")
2128 while index != -1 and self.queue.startswith("<event>"):
2129 try:
2130 event = pickle.loads(self.queue[7:index])
2131 except ValueError as e:
2132                bb.msg.fatal("RunQueue", "failed to load pickle '%s': '%s'" % (e, self.queue[7:index]))
2133 bb.event.fire_from_worker(event, self.d)
2134 found = True
2135 self.queue = self.queue[index+8:]
2136 index = self.queue.find("</event>")
2137 index = self.queue.find("</exitcode>")
2138 while index != -1 and self.queue.startswith("<exitcode>"):
2139 try:
2140 task, status = pickle.loads(self.queue[10:index])
2141 except ValueError as e:
2142                bb.msg.fatal("RunQueue", "failed to load pickle '%s': '%s'" % (e, self.queue[10:index]))
2143 self.rqexec.runqueue_process_waitpid(task, status)
2144 found = True
2145 self.queue = self.queue[index+11:]
2146 index = self.queue.find("</exitcode>")
2147 return (end > start)
2148
2149 def close(self):
2150 while self.read():
2151 continue
2152 if len(self.queue) > 0:
2153 print("Warning, worker left partial message: %s" % self.queue)
2154 self.input.close()
diff --git a/bitbake/lib/bb/server/__init__.py b/bitbake/lib/bb/server/__init__.py
new file mode 100644
index 0000000000..da5e480740
--- /dev/null
+++ b/bitbake/lib/bb/server/__init__.py
@@ -0,0 +1,96 @@
1#
2# BitBake Base Server Code
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2008 Richard Purdie
6# Copyright (C) 2013 Alexandru Damian
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21""" Base code for Bitbake server process
22
23Having a common base for all BitBake server classes ensures a consistent
24approach to the interface and minimizes the risks associated with code duplication.
25
26"""
27
28""" BaseImplServer() the base class for all XXServer() implementations.
29
30 These classes contain the actual code that runs the server side, i.e.
31 listens for the commands and executes them. Although these implementations
32 contain all the data of the original bitbake command, i.e the cooker instance,
33 they may well run on a different process or even machine.
34
35"""
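# A minimal usage sketch (editorial illustration, not part of the original
# patch; the function and class names below are hypothetical). Idle functions
# registered with register_idle_function() follow the convention used by the
# concrete servers in process.py and xmlrpc.py: return False to be removed,
# True to be polled again without delay, or a list of file descriptors to
# select() on; the third argument signals that the server is exiting.
#
#     def idle_check(server, data, abort):
#         return False
#
#     impl = SomeImplServer()          # a BaseImplServer subclass
#     impl.register_idle_function(idle_check, data=None)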
36
37class BaseImplServer():
38 def __init__(self):
39 self._idlefuns = {}
40
41 def addcooker(self, cooker):
42 self.cooker = cooker
43
44 def register_idle_function(self, function, data):
45 """Register a function to be called while the server is idle"""
46 assert hasattr(function, '__call__')
47 self._idlefuns[function] = data
48
49
50
51""" BitBakeBaseServerConnection class is the common ancestor to all
52 BitBakeServerConnection classes.
53
54 These classes control the remote server. The only command currently
55 implemented is the terminate() command.
56
57"""
58
59class BitBakeBaseServerConnection():
60 def __init__(self, serverImpl):
61 pass
62
63 def terminate(self):
64 pass
65
66
67""" BitBakeBaseServer class is the common ancestor to all Bitbake servers
68
69 Derive this class in order to implement a BitBakeServer which is the
70 controlling stub for the actual server implementation
71
72"""
73class BitBakeBaseServer(object):
74 def initServer(self):
75 self.serverImpl = None # we ensure a runtime crash if not overloaded
76 self.connection = None
77 return
78
79 def addcooker(self, cooker):
80 self.cooker = cooker
81 self.serverImpl.addcooker(cooker)
82
83 def getServerIdleCB(self):
84 return self.serverImpl.register_idle_function
85
86 def saveConnectionDetails(self):
87 return
88
89 def detach(self):
90 return
91
92 def establishConnection(self, featureset):
93        raise NotImplementedError("Must redefine the %s.establishConnection()" % self.__class__.__name__)
94
95 def endSession(self):
96 self.connection.terminate()
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
new file mode 100644
index 0000000000..577c2503ac
--- /dev/null
+++ b/bitbake/lib/bb/server/process.py
@@ -0,0 +1,236 @@
1#
2# BitBake Process based server.
3#
4# Copyright (C) 2010 Bob Foerster <robert@erafx.com>
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19"""
20    This module implements a multiprocessing.Process-based server for BitBake.
21"""
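# Architecture note (editorial): the UI front end and the ProcessServer defined
# below run in separate processes; commands and their replies travel over a
# bidirectional multiprocessing Pipe (see ServerCommunicator), while events flow
# back to the UI through a ProcessEventQueue. BitBakeServer.initServer() wires
# the two together.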
22
23import bb
24import bb.event
25import itertools
26import logging
27import multiprocessing
28import os
29import signal
30import sys
31import time
32import select
33from Queue import Empty
34from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
35
36from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
37
38logger = logging.getLogger('BitBake')
39
40class ServerCommunicator():
41 def __init__(self, connection, event_handle):
42 self.connection = connection
43 self.event_handle = event_handle
44
45 def runCommand(self, command):
46 # @todo try/except
47 self.connection.send(command)
48
49 while True:
50 # don't let the user ctrl-c while we're waiting for a response
51 try:
52 if self.connection.poll(20):
53 return self.connection.recv()
54 else:
55 bb.fatal("Timeout while attempting to communicate with bitbake server")
56 except KeyboardInterrupt:
57 pass
58
59 def getEventHandle(self):
60 return self.event_handle.value
61
62class EventAdapter():
63 """
64 Adapter to wrap our event queue since the caller (bb.event) expects to
65 call a send() method, but our actual queue only has put()
66 """
67 def __init__(self, queue):
68 self.queue = queue
69
70 def send(self, event):
71 try:
72 self.queue.put(event)
73 except Exception as err:
74 print("EventAdapter puked: %s" % str(err))
75
76
77class ProcessServer(Process, BaseImplServer):
78 profile_filename = "profile.log"
79 profile_processed_filename = "profile.log.processed"
80
81 def __init__(self, command_channel, event_queue, featurelist):
82 BaseImplServer.__init__(self)
83 Process.__init__(self)
84 self.command_channel = command_channel
85 self.event_queue = event_queue
86 self.event = EventAdapter(event_queue)
87 self.featurelist = featurelist
88 self.quit = False
89
90 self.quitin, self.quitout = Pipe()
91 self.event_handle = multiprocessing.Value("i")
92
93 def run(self):
94 for event in bb.event.ui_queue:
95 self.event_queue.put(event)
96 self.event_handle.value = bb.event.register_UIHhandler(self)
97
98 bb.cooker.server_main(self.cooker, self.main)
99
100 def main(self):
101 # Ignore SIGINT within the server, as all SIGINT handling is done by
102 # the UI and communicated to us
103 self.quitin.close()
104 signal.signal(signal.SIGINT, signal.SIG_IGN)
105 while not self.quit:
106 try:
107 if self.command_channel.poll():
108 command = self.command_channel.recv()
109 self.runCommand(command)
110 if self.quitout.poll():
111 self.quitout.recv()
112 self.quit = True
113
114 self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
115 except Exception:
116 logger.exception('Running command %s', command)
117
118 self.event_queue.close()
119 bb.event.unregister_UIHhandler(self.event_handle.value)
120 self.command_channel.close()
121 self.cooker.shutdown(True)
122
123 def idle_commands(self, delay, fds = []):
124 nextsleep = delay
125
126 for function, data in self._idlefuns.items():
127 try:
128 retval = function(self, data, False)
129 if retval is False:
130 del self._idlefuns[function]
131 nextsleep = None
132 elif retval is True:
133 nextsleep = None
134 elif nextsleep is None:
135 continue
136 else:
137 fds = fds + retval
138 except SystemExit:
139 raise
140 except Exception:
141 logger.exception('Running idle function')
142
143 if nextsleep is not None:
144 select.select(fds,[],[],nextsleep)
145
146 def runCommand(self, command):
147 """
148 Run a cooker command on the server
149 """
150 self.command_channel.send(self.cooker.command.runCommand(command))
151
152 def stop(self):
153 self.quitin.send("quit")
154 self.quitin.close()
155
156class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
157 def __init__(self, serverImpl, ui_channel, event_queue):
158 self.procserver = serverImpl
159 self.ui_channel = ui_channel
160 self.event_queue = event_queue
161 self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
162 self.events = self.event_queue
163
164 def sigterm_terminate(self):
165 bb.error("UI received SIGTERM")
166 self.terminate()
167
168 def terminate(self):
169 def flushevents():
170 while True:
171 try:
172 event = self.event_queue.get(block=False)
173 except (Empty, IOError):
174 break
175 if isinstance(event, logging.LogRecord):
176 logger.handle(event)
177
178 signal.signal(signal.SIGINT, signal.SIG_IGN)
179 self.procserver.stop()
180
181 while self.procserver.is_alive():
182 flushevents()
183 self.procserver.join(0.1)
184
185 self.ui_channel.close()
186 self.event_queue.close()
187 self.event_queue.setexit()
188
189# Wrap Queue to provide API which isn't server implementation specific
190class ProcessEventQueue(multiprocessing.queues.Queue):
191 def __init__(self, maxsize):
192 multiprocessing.queues.Queue.__init__(self, maxsize)
193 self.exit = False
194
195 def setexit(self):
196 self.exit = True
197
198 def waitEvent(self, timeout):
199 if self.exit:
200 raise KeyboardInterrupt()
201 try:
202 return self.get(True, timeout)
203 except Empty:
204 return None
205
206 def getEvent(self):
207 try:
208 return self.get(False)
209 except Empty:
210 return None
211
212
213class BitBakeServer(BitBakeBaseServer):
214 def initServer(self):
215 # establish communication channels. We use bidirectional pipes for
216 # ui <--> server command/response pairs
217 # and a queue for server -> ui event notifications
218 #
219 self.ui_channel, self.server_channel = Pipe()
220 self.event_queue = ProcessEventQueue(0)
221 self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
222
223 def detach(self):
224 self.serverImpl.start()
225 return
226
227 def establishConnection(self, featureset):
228
229 self.connection = BitBakeProcessServerConnection(self.serverImpl, self.ui_channel, self.event_queue)
230
231 _, error = self.connection.connection.runCommand(["setFeatures", featureset])
232 if error:
233 logger.error("Unable to set the cooker to the correct featureset: %s" % error)
234 raise BaseException(error)
235 signal.signal(signal.SIGTERM, lambda i, s: self.connection.sigterm_terminate())
236 return self.connection
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
new file mode 100644
index 0000000000..5dcaa6c7b0
--- /dev/null
+++ b/bitbake/lib/bb/server/xmlrpc.py
@@ -0,0 +1,392 @@
1#
2# BitBake XMLRPC Server
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2008 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20"""
21 This module implements an xmlrpc server for BitBake.
22
23 Use this by deriving a class from BitBakeXMLRPCServer and then adding
24 methods which you want to "export" via XMLRPC. If the methods have the
25    prefix xmlrpc_, then registering those functions will happen automatically;
26    if not, you need to call register_function.
27
28 Use register_idle_function() to add a function which the xmlrpc server
29    calls from within serve_forever when no requests are pending. Make sure
30 that those functions are non-blocking or else you will introduce latency
31 in the server's main loop.
32"""
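# A sketch of the export convention described above (editorial illustration;
# the class and method names are hypothetical, not part of this patch):
#
#     class MyServer(BitBakeXMLRPCServer):
#         def xmlrpc_ping(self):
#             return "pong"
#
# In this file the same mechanism is provided by
# XMLRPCServer.autoregister_all_functions(), which registers every method of
# BitBakeServerCommands (using an empty prefix) as an XMLRPC endpoint.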
33
34import bb
35import xmlrpclib, sys
36from bb import daemonize
37from bb.ui import uievent
38import hashlib, time
39import socket
40import os, signal
41import threading
42try:
43 import cPickle as pickle
44except ImportError:
45 import pickle
46
47DEBUG = False
48
49from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
50import inspect, select, httplib
51
52from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
53
54class BBTransport(xmlrpclib.Transport):
55 def __init__(self, timeout):
56 self.timeout = timeout
57 self.connection_token = None
58 xmlrpclib.Transport.__init__(self)
59
60 # Modified from default to pass timeout to HTTPConnection
61 def make_connection(self, host):
62 #return an existing connection if possible. This allows
63 #HTTP/1.1 keep-alive.
64 if self._connection and host == self._connection[0]:
65 return self._connection[1]
66
67 # create a HTTP connection object from a host descriptor
68 chost, self._extra_headers, x509 = self.get_host_info(host)
69 #store the host argument along with the connection object
70 self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
71 return self._connection[1]
72
73 def set_connection_token(self, token):
74 self.connection_token = token
75
76 def send_content(self, h, body):
77 if self.connection_token:
78 h.putheader("Bitbake-token", self.connection_token)
79 xmlrpclib.Transport.send_content(self, h, body)
80
81def _create_server(host, port, timeout = 60):
82 t = BBTransport(timeout)
83 s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
84 return s, t
85
86class BitBakeServerCommands():
87
88 def __init__(self, server):
89 self.server = server
90 self.has_client = False
91
92 def registerEventHandler(self, host, port):
93 """
94 Register a remote UI Event Handler
95 """
96 s, t = _create_server(host, port)
97
98 # we don't allow connections if the cooker is running
99 if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
100 return None
101
102 self.event_handle = bb.event.register_UIHhandler(s)
103 return self.event_handle
104
105 def unregisterEventHandler(self, handlerNum):
106 """
107 Unregister a remote UI Event Handler
108 """
109 return bb.event.unregister_UIHhandler(handlerNum)
110
111 def runCommand(self, command):
112 """
113 Run a cooker command on the server
114 """
115 return self.cooker.command.runCommand(command, self.server.readonly)
116
117 def getEventHandle(self):
118 return self.event_handle
119
120 def terminateServer(self):
121 """
122 Trigger the server to quit
123 """
124 self.server.quit = True
125 print("Server (cooker) exiting")
126 return
127
128 def addClient(self):
129 if self.has_client:
130 return None
131 token = hashlib.md5(str(time.time())).hexdigest()
132 self.server.set_connection_token(token)
133 self.has_client = True
134 return token
135
136 def removeClient(self):
137 if self.has_client:
138 self.server.set_connection_token(None)
139 self.has_client = False
140 if self.server.single_use:
141 self.server.quit = True
142
143# This request handler checks if the request has a "Bitbake-token" header
144# field (this comes from the client side) and compares it with its internal
145# "Bitbake-token" field (this comes from the server). If the two are not
146# equal, it is assumed that a client is trying to connect to the server
147# while another client is connected to the server. In this case, a 503 error
148# ("service unavailable") is returned to the client.
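# Editorial note on the token flow: addClient() generates an md5 token and
# stores it via set_connection_token(); the client-side BBTransport then sends
# it in a "Bitbake-token" header on every request, and do_POST() below rejects
# requests whose token does not match (the special token "observer" is allowed
# through in read-only mode).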
149class BitBakeXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
150 def __init__(self, request, client_address, server):
151 self.server = server
152 SimpleXMLRPCRequestHandler.__init__(self, request, client_address, server)
153
154 def do_POST(self):
155 try:
156 remote_token = self.headers["Bitbake-token"]
157 except:
158 remote_token = None
159 if remote_token != self.server.connection_token and remote_token != "observer":
160 self.report_503()
161 else:
162 if remote_token == "observer":
163 self.server.readonly = True
164 else:
165 self.server.readonly = False
166 SimpleXMLRPCRequestHandler.do_POST(self)
167
168 def report_503(self):
169 self.send_response(503)
170        response = 'No more clients allowed'
171 self.send_header("Content-type", "text/plain")
172 self.send_header("Content-length", str(len(response)))
173 self.end_headers()
174 self.wfile.write(response)
175
176
177class XMLRPCProxyServer(BaseImplServer):
178 """ not a real working server, but a stub for a proxy server connection
179
180 """
181 def __init__(self, host, port):
182 self.host = host
183 self.port = port
184
185class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
186 # remove this when you're done with debugging
187 # allow_reuse_address = True
188
189 def __init__(self, interface):
190 """
191 Constructor
192 """
193 BaseImplServer.__init__(self)
194 if (interface[1] == 0): # anonymous port, not getting reused
195 self.single_use = True
196 # Use auto port configuration
197 if (interface[1] == -1):
198 interface = (interface[0], 0)
199 SimpleXMLRPCServer.__init__(self, interface,
200 requestHandler=BitBakeXMLRPCRequestHandler,
201 logRequests=False, allow_none=True)
202 self.host, self.port = self.socket.getsockname()
203 self.connection_token = None
204 #self.register_introspection_functions()
205 self.commands = BitBakeServerCommands(self)
206 self.autoregister_all_functions(self.commands, "")
207 self.interface = interface
208 self.single_use = False
209
210 def addcooker(self, cooker):
211 BaseImplServer.addcooker(self, cooker)
212 self.commands.cooker = cooker
213
214 def autoregister_all_functions(self, context, prefix):
215 """
216 Convenience method for registering all functions in the scope
217 of this class that start with a common prefix
218 """
219 methodlist = inspect.getmembers(context, inspect.ismethod)
220 for name, method in methodlist:
221 if name.startswith(prefix):
222 self.register_function(method, name[len(prefix):])
223
224
225 def serve_forever(self):
226 # Start the actual XMLRPC server
227 bb.cooker.server_main(self.cooker, self._serve_forever)
228
229 def _serve_forever(self):
230 """
231 Serve Requests. Overloaded to honor a quit command
232 """
233 self.quit = False
234 while not self.quit:
235 fds = [self]
236 nextsleep = 0.1
237 for function, data in self._idlefuns.items():
238 try:
239 retval = function(self, data, False)
240 if retval is False:
241 del self._idlefuns[function]
242 elif retval is True:
243 nextsleep = 0
244 else:
245 fds = fds + retval
246 except SystemExit:
247 raise
248 except:
249 import traceback
250 traceback.print_exc()
251 pass
252
253 socktimeout = self.socket.gettimeout() or nextsleep
254 socktimeout = min(socktimeout, nextsleep)
255 # Mirror what BaseServer handle_request would do
256 fd_sets = select.select(fds, [], [], socktimeout)
257 if fd_sets[0] and self in fd_sets[0]:
258 self._handle_request_noblock()
259
260 # Tell idle functions we're exiting
261 for function, data in self._idlefuns.items():
262 try:
263 retval = function(self, data, True)
264 except:
265 pass
266 self.server_close()
267 return
268
269 def set_connection_token(self, token):
270 self.connection_token = token
271
272class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
273 def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = []):
274 self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
275 self.clientinfo = clientinfo
276 self.serverImpl = serverImpl
277 self.observer_only = observer_only
278 self.featureset = featureset
279
280 def connect(self):
281 if not self.observer_only:
282 token = self.connection.addClient()
283 else:
284 token = "observer"
285 if token is None:
286 return None
287 self.transport.set_connection_token(token)
288
289 self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
290 for event in bb.event.ui_queue:
291 self.events.queue_event(event)
292
293 _, error = self.connection.runCommand(["setFeatures", self.featureset])
294 if error:
295 # no need to log it here, the error shall be sent to the client
296 raise BaseException(error)
297
298 return self
299
300 def removeClient(self):
301 if not self.observer_only:
302 self.connection.removeClient()
303
304 def terminate(self):
305 # Don't wait for server indefinitely
306 import socket
307 socket.setdefaulttimeout(2)
308 try:
309 self.events.system_quit()
310 except:
311 pass
312 try:
313 self.connection.removeClient()
314 except:
315 pass
316
317class BitBakeServer(BitBakeBaseServer):
318 def initServer(self, interface = ("localhost", 0)):
319 self.interface = interface
320 self.serverImpl = XMLRPCServer(interface)
321
322 def detach(self):
323 daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
324 del self.cooker
325
326 def establishConnection(self, featureset):
327 self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, self.interface, False, featureset)
328 return self.connection.connect()
329
330 def set_connection_token(self, token):
331 self.connection.transport.set_connection_token(token)
332
333class BitBakeXMLRPCClient(BitBakeBaseServer):
334
335 def __init__(self, observer_only = False):
336 self.observer_only = observer_only
337 # if we need extra caches, just tell the server to load them all
338 pass
339
340 def saveConnectionDetails(self, remote):
341 self.remote = remote
342
343 def saveConnectionConfigParams(self, configParams):
344 self.configParams = configParams
345
346 def establishConnection(self, featureset):
347 # The format of "remote" must be "server:port"
348 try:
349 [host, port] = self.remote.split(":")
350 port = int(port)
351 except Exception as e:
352 bb.fatal("Failed to read remote definition (%s)" % str(e))
353
354 # use automatic port if port set to -1, meaning read it from
355 # the bitbake.lock file
356 if port == -1:
357 lock_location = "%s/bitbake.lock" % self.configParams.environment.get('BUILDDIR')
358 lock = bb.utils.lockfile(lock_location, False, False)
359 if lock:
360 # This means there is no server running which we can
361 # connect to on the local system.
362 bb.utils.unlockfile(lock)
363 return None
364
365 try:
366 lf = open(lock_location, 'r')
367 remotedef = lf.readline()
368 [host, port] = remotedef.split(":")
369 port = int(port)
370 lf.close()
371 self.remote = remotedef
372 except Exception as e:
373 bb.fatal("Failed to read bitbake.lock (%s)" % str(e))
374
375 # We need our IP for the server connection. We get the IP
376 # by trying to connect with the server
377 try:
378 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
379 s.connect((host, port))
380 ip = s.getsockname()[0]
381 s.close()
382 except Exception as e:
383 bb.fatal("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
384 try:
385 self.serverImpl = XMLRPCProxyServer(host, port)
386 self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
387 return self.connection.connect()
388 except Exception as e:
389 bb.fatal("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
390
391 def endSession(self):
392 self.connection.removeClient()
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
new file mode 100644
index 0000000000..1dd8d54bdb
--- /dev/null
+++ b/bitbake/lib/bb/shell.py
@@ -0,0 +1,820 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3##########################################################################
4#
5# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
6# Copyright (C) 2005-2006 Vanille Media
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21##########################################################################
22#
23# Thanks to:
24# * Holger Freyther <zecke@handhelds.org>
25# * Justin Patrin <papercrane@reversefold.com>
26#
27##########################################################################
28
29"""
30BitBake Shell
31
32IDEAS:
33 * list defined tasks per package
34 * list classes
35 * toggle force
36 * command to reparse just one (or more) bbfile(s)
37 * automatic check if reparsing is necessary (inotify?)
38 * frontend for bb file manipulation
39 * more shell-like features:
40 - output control, i.e. pipe output into grep, sort, etc.
41 - job control, i.e. bring running commands into background and foreground
42 * start parsing in background right after startup
43 * ncurses interface
44
45PROBLEMS:
46 * force doesn't always work
47 * readline completion for commands with more than one parameter
48
49"""
50
51##########################################################################
52# Import and setup global variables
53##########################################################################
54
55from __future__ import print_function
56from functools import reduce
57try:
58 set
59except NameError:
60 from sets import Set as set
61import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
62from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
63
64__version__ = "0.5.3.1"
65__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
66Type 'help' for more information, press CTRL-D to exit.""" % __version__
67
68cmds = {}
69leave_mainloop = False
70last_exception = None
71cooker = None
72parsed = False
73debug = os.environ.get( "BBSHELL_DEBUG", "" )
74
75##########################################################################
76# Class BitBakeShellCommands
77##########################################################################
78
79class BitBakeShellCommands:
80 """This class contains the valid commands for the shell"""
81
82 def __init__( self, shell ):
83 """Register all the commands"""
84 self._shell = shell
85 for attr in BitBakeShellCommands.__dict__:
86 if not attr.startswith( "_" ):
87 if attr.endswith( "_" ):
88 command = attr[:-1].lower()
89 else:
90 command = attr[:].lower()
91 method = getattr( BitBakeShellCommands, attr )
92 debugOut( "registering command '%s'" % command )
93 # scan number of arguments
94 usage = getattr( method, "usage", "" )
95 if usage != "<...>":
96 numArgs = len( usage.split() )
97 else:
98 numArgs = -1
99 shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
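    # Convention used by the registration loop above (editorial note): any
    # public method of this class becomes a shell command; a trailing
    # underscore in the method name (exit_, print_) avoids clashing with a
    # Python keyword, and an optional "<name>.usage" attribute declares the
    # expected parameters ("<...>" means a variable number of arguments).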
100
101 def _checkParsed( self ):
102 if not parsed:
103 print("SHELL: This command needs to parse bbfiles...")
104 self.parse( None )
105
106 def _findProvider( self, item ):
107 self._checkParsed()
108 # Need to use taskData for this information
109 preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
110 if not preferred: preferred = item
111 try:
112 lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
113 except KeyError:
114 if item in cooker.status.providers:
115 pf = cooker.status.providers[item][0]
116 else:
117 pf = None
118 return pf
119
120 def alias( self, params ):
121 """Register a new name for a command"""
122 new, old = params
123 if not old in cmds:
124 print("ERROR: Command '%s' not known" % old)
125 else:
126 cmds[new] = cmds[old]
127 print("OK")
128 alias.usage = "<alias> <command>"
129
130 def buffer( self, params ):
131 """Dump specified output buffer"""
132 index = params[0]
133 print(self._shell.myout.buffer( int( index ) ))
134 buffer.usage = "<index>"
135
136 def buffers( self, params ):
137 """Show the available output buffers"""
138 commands = self._shell.myout.bufferedCommands()
139 if not commands:
140 print("SHELL: No buffered commands available yet. Start doing something.")
141 else:
142 print("="*35, "Available Output Buffers", "="*27)
143 for index, cmd in enumerate( commands ):
144 print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
145 print("="*88)
146
147 def build( self, params, cmd = "build" ):
148 """Build a providee"""
149 global last_exception
150 globexpr = params[0]
151 self._checkParsed()
152 names = globfilter( cooker.status.pkg_pn, globexpr )
153 if len( names ) == 0: names = [ globexpr ]
154 print("SHELL: Building %s" % ' '.join( names ))
155
156 td = taskdata.TaskData(cooker.configuration.abort)
157 localdata = data.createCopy(cooker.configuration.data)
158 data.update_data(localdata)
159 data.expandKeys(localdata)
160
161 try:
162 tasks = []
163 for name in names:
164 td.add_provider(localdata, cooker.status, name)
165 providers = td.get_provider(name)
166
167 if len(providers) == 0:
168 raise Providers.NoProvider
169
170 tasks.append([name, "do_%s" % cmd])
171
172 td.add_unresolved(localdata, cooker.status)
173
174 rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
175 rq.prepare_runqueue()
176 rq.execute_runqueue()
177
178 except Providers.NoProvider:
179 print("ERROR: No Provider")
180 last_exception = Providers.NoProvider
181
182 except runqueue.TaskFailure as fnids:
183 last_exception = runqueue.TaskFailure
184
185 except build.FuncFailed as e:
186 print("ERROR: Couldn't build '%s'" % names)
187 last_exception = e
188
189
190 build.usage = "<providee>"
191
192 def clean( self, params ):
193 """Clean a providee"""
194 self.build( params, "clean" )
195 clean.usage = "<providee>"
196
197 def compile( self, params ):
198 """Execute 'compile' on a providee"""
199 self.build( params, "compile" )
200 compile.usage = "<providee>"
201
202 def configure( self, params ):
203 """Execute 'configure' on a providee"""
204 self.build( params, "configure" )
205 configure.usage = "<providee>"
206
207 def install( self, params ):
208 """Execute 'install' on a providee"""
209 self.build( params, "install" )
210 install.usage = "<providee>"
211
212 def edit( self, params ):
213 """Call $EDITOR on a providee"""
214 name = params[0]
215 bbfile = self._findProvider( name )
216 if bbfile is not None:
217 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
218 else:
219 print("ERROR: Nothing provides '%s'" % name)
220 edit.usage = "<providee>"
221
222 def environment( self, params ):
223 """Dump out the outer BitBake environment"""
224 cooker.showEnvironment()
225
226 def exit_( self, params ):
227 """Leave the BitBake Shell"""
228 debugOut( "setting leave_mainloop to true" )
229 global leave_mainloop
230 leave_mainloop = True
231
232 def fetch( self, params ):
233 """Fetch a providee"""
234 self.build( params, "fetch" )
235 fetch.usage = "<providee>"
236
237 def fileBuild( self, params, cmd = "build" ):
238 """Parse and build a .bb file"""
239 global last_exception
240 name = params[0]
241 bf = completeFilePath( name )
242 print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
243
244 try:
245 cooker.buildFile(bf, cmd)
246 except parse.ParseError:
247 print("ERROR: Unable to open or parse '%s'" % bf)
248 except build.FuncFailed as e:
249 print("ERROR: Couldn't build '%s'" % name)
250 last_exception = e
251
252 fileBuild.usage = "<bbfile>"
253
254 def fileClean( self, params ):
255 """Clean a .bb file"""
256 self.fileBuild( params, "clean" )
257 fileClean.usage = "<bbfile>"
258
259 def fileEdit( self, params ):
260 """Call $EDITOR on a .bb file"""
261 name = params[0]
262 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
263 fileEdit.usage = "<bbfile>"
264
265 def fileRebuild( self, params ):
266 """Rebuild (clean & build) a .bb file"""
267 self.fileBuild( params, "rebuild" )
268 fileRebuild.usage = "<bbfile>"
269
270 def fileReparse( self, params ):
271 """(re)Parse a bb file"""
272 bbfile = params[0]
273 print("SHELL: Parsing '%s'" % bbfile)
274 parse.update_mtime( bbfile )
275 cooker.parser.reparse(bbfile)
276 if False: #fromCache:
277 print("SHELL: File has not been updated, not reparsing")
278 else:
279 print("SHELL: Parsed")
280 fileReparse.usage = "<bbfile>"
281
282 def abort( self, params ):
283 """Toggle abort task execution flag (see bitbake -k)"""
284 cooker.configuration.abort = not cooker.configuration.abort
285 print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
286
287 def force( self, params ):
288 """Toggle force task execution flag (see bitbake -f)"""
289 cooker.configuration.force = not cooker.configuration.force
290 print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
291
292 def help( self, params ):
293 """Show a comprehensive list of commands and their purpose"""
294 print("="*30, "Available Commands", "="*30)
295 for cmd in sorted(cmds):
296 function, numparams, usage, helptext = cmds[cmd]
297 print("| %s | %s" % (usage.ljust(30), helptext))
298 print("="*78)
299
300 def lastError( self, params ):
301 """Show the reason or log that was produced by the last BitBake event exception"""
302 if last_exception is None:
303 print("SHELL: No Errors yet (Phew)...")
304 else:
305 reason, event = last_exception.args
306 print("SHELL: Reason for the last error: '%s'" % reason)
307 if ':' in reason:
308 msg, filename = reason.split( ':' )
309 filename = filename.strip()
310 print("SHELL: Dumping log file for last error:")
311 try:
312 print(open( filename ).read())
313 except IOError:
314 print("ERROR: Couldn't open '%s'" % filename)
315
316 def match( self, params ):
317 """Dump all files or providers matching a glob expression"""
318 what, globexpr = params
319 if what == "files":
320 self._checkParsed()
321 for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
322 elif what == "providers":
323 self._checkParsed()
324 for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
325 else:
326 print("Usage: match %s" % self.print_.usage)
327 match.usage = "<files|providers> <glob>"
328
329 def new( self, params ):
330 """Create a new .bb file and open the editor"""
331 dirname, filename = params
332 packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
333 fulldirname = "%s/%s" % ( packages, dirname )
334
335 if not os.path.exists( fulldirname ):
336 print("SHELL: Creating '%s'" % fulldirname)
337 os.mkdir( fulldirname )
338 if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
339 if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
340 print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
341 return False
342 print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
343 newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
344 print("""DESCRIPTION = ""
345SECTION = ""
346AUTHOR = ""
347HOMEPAGE = ""
348MAINTAINER = ""
349LICENSE = "GPL"
350PR = "r0"
351
352SRC_URI = ""
353
354#inherit base
355
356#do_configure() {
357#
358#}
359
360#do_compile() {
361#
362#}
363
364#do_stage() {
365#
366#}
367
368#do_install() {
369#
370#}
371""", file=newpackage)
372 newpackage.close()
373 os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
374 new.usage = "<directory> <filename>"
375
376 def package( self, params ):
377 """Execute 'package' on a providee"""
378 self.build( params, "package" )
379 package.usage = "<providee>"
380
381 def pasteBin( self, params ):
382 """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
383 index = params[0]
384 contents = self._shell.myout.buffer( int( index ) )
385 sendToPastebin( "output of " + params[0], contents )
386 pasteBin.usage = "<index>"
387
388 def pasteLog( self, params ):
389 """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
390 if last_exception is None:
391 print("SHELL: No Errors yet (Phew)...")
392 else:
393 reason, event = last_exception.args
394 print("SHELL: Reason for the last error: '%s'" % reason)
395 if ':' in reason:
396 msg, filename = reason.split( ':' )
397 filename = filename.strip()
398 print("SHELL: Pasting log file to pastebin...")
399
400 file = open( filename ).read()
401 sendToPastebin( "contents of " + filename, file )
402
403 def patch( self, params ):
404 """Execute 'patch' command on a providee"""
405 self.build( params, "patch" )
406 patch.usage = "<providee>"
407
408 def parse( self, params ):
409 """(Re-)parse .bb files and calculate the dependency graph"""
410 cooker.status = cache.CacheData(cooker.caches_array)
411 ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
412 cooker.status.ignored_dependencies = set( ignore.split() )
413 cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
414
415 (filelist, masked) = cooker.collect_bbfiles()
416 cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
417 cooker.buildDepgraph()
418 global parsed
419 parsed = True
420 print()
421
422 def reparse( self, params ):
423 """(re)Parse a providee's bb file"""
424 bbfile = self._findProvider( params[0] )
425 if bbfile is not None:
426 print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
427 self.fileReparse( [ bbfile ] )
428 else:
429 print("ERROR: Nothing provides '%s'" % params[0])
430 reparse.usage = "<providee>"
431
432 def getvar( self, params ):
433 """Dump the contents of an outer BitBake environment variable"""
434 var = params[0]
435 value = data.getVar( var, cooker.configuration.data, 1 )
436 print(value)
437 getvar.usage = "<variable>"
438
439 def peek( self, params ):
440 """Dump contents of variable defined in providee's metadata"""
441 name, var = params
442 bbfile = self._findProvider( name )
443 if bbfile is not None:
444 the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
445 value = the_data.getVar( var, 1 )
446 print(value)
447 else:
448 print("ERROR: Nothing provides '%s'" % name)
449 peek.usage = "<providee> <variable>"
450
451 def poke( self, params ):
452 """Set contents of variable defined in providee's metadata"""
453 name, var, value = params
454 bbfile = self._findProvider( name )
455 if bbfile is not None:
456 print("ERROR: Sorry, this functionality is currently broken")
457 #d = cooker.pkgdata[bbfile]
458 #data.setVar( var, value, d )
459
460 # mark the change semi-persistent
461 #cooker.pkgdata.setDirty(bbfile, d)
462 #print "OK"
463 else:
464 print("ERROR: Nothing provides '%s'" % name)
465 poke.usage = "<providee> <variable> <value>"
466
467 def print_( self, params ):
468 """Dump all files or providers"""
469 what = params[0]
470 if what == "files":
471 self._checkParsed()
472 for key in cooker.status.pkg_fn: print(key)
473 elif what == "providers":
474 self._checkParsed()
475 for key in cooker.status.providers: print(key)
476 else:
477 print("Usage: print %s" % self.print_.usage)
478 print_.usage = "<files|providers>"
479
480 def python( self, params ):
481 """Enter the expert mode - an interactive BitBake Python Interpreter"""
482 sys.ps1 = "EXPERT BB>>> "
483 sys.ps2 = "EXPERT BB... "
484 import code
485 interpreter = code.InteractiveConsole( dict( globals() ) )
486 interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
487
488 def showdata( self, params ):
489 """Execute 'showdata' on a providee"""
490 cooker.showEnvironment(None, params)
491 showdata.usage = "<providee>"
492
493 def setVar( self, params ):
494 """Set an outer BitBake environment variable"""
495 var, value = params
496 data.setVar( var, value, cooker.configuration.data )
497 print("OK")
498 setVar.usage = "<variable> <value>"
499
500 def rebuild( self, params ):
501 """Clean and rebuild a .bb file or a providee"""
502 self.build( params, "clean" )
503 self.build( params, "build" )
504 rebuild.usage = "<providee>"
505
506 def shell( self, params ):
507 """Execute a shell command and dump the output"""
508 if params != "":
509 print(commands.getoutput( " ".join( params ) ))
510 shell.usage = "<...>"
511
512 def stage( self, params ):
513 """Execute 'stage' on a providee"""
514 self.build( params, "populate_staging" )
515 stage.usage = "<providee>"
516
517 def status( self, params ):
518 """<just for testing>"""
519 print("-" * 78)
520 print("building list = '%s'" % cooker.building_list)
521 print("build path = '%s'" % cooker.build_path)
522 print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
523 print("build stats = '%s'" % cooker.stats)
524 if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
525 print("memory output contents = '%s'" % self._shell.myout._buffer)
526
527 def test( self, params ):
528 """<just for testing>"""
529 print("testCommand called with '%s'" % params)
530
531 def unpack( self, params ):
532 """Execute 'unpack' on a providee"""
533 self.build( params, "unpack" )
534 unpack.usage = "<providee>"
535
536 def which( self, params ):
537 """Computes the providers for a given providee"""
538 # Need to use taskData for this information
539 item = params[0]
540
541 self._checkParsed()
542
543 preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
544 if not preferred: preferred = item
545
546 try:
547 lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
548 except KeyError:
549 lv, lf, pv, pf = (None,)*4
550
551 try:
552 providers = cooker.status.providers[item]
553 except KeyError:
554 print("SHELL: ERROR: Nothing provides", preferred)
555 else:
556 for provider in providers:
557 if provider == pf: provider = " (***) %s" % provider
558 else: provider = " %s" % provider
559 print(provider)
560 which.usage = "<providee>"
561
562##########################################################################
563# Common helper functions
564##########################################################################
565
566def completeFilePath( bbfile ):
567 """Get the complete bbfile path"""
568 if not cooker.status: return bbfile
569 if not cooker.status.pkg_fn: return bbfile
570 for key in cooker.status.pkg_fn:
571 if key.endswith( bbfile ):
572 return key
573 return bbfile
574
575def sendToPastebin( desc, content ):
576 """Send content to http://oe.pastebin.com"""
577 mydata = {}
578 mydata["lang"] = "Plain Text"
579 mydata["desc"] = desc
580 mydata["cvt_tabs"] = "No"
581 mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
582 mydata["text"] = content
583 params = urllib.urlencode( mydata )
584 headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
585
586 host = "rafb.net"
587 conn = httplib.HTTPConnection( "%s:80" % host )
588 conn.request("POST", "/paste/paste.php", params, headers )
589
590 response = conn.getresponse()
591 conn.close()
592
593 if response.status == 302:
594 location = response.getheader( "location" ) or "unknown"
595 print("SHELL: Pasted to http://%s%s" % ( host, location ))
596 else:
597 print("ERROR: %s %s" % ( response.status, response.reason ))
598
599def completer( text, state ):
600 """Return a possible readline completion"""
601 debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
602
603 if state == 0:
604 line = readline.get_line_buffer()
605 if " " in line:
606 line = line.split()
607 # we are in second (or more) argument
608 if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
609 u = getattr( cmds[line[0]][0], "usage" ).split()[0]
610 if u == "<variable>":
611 allmatches = cooker.configuration.data.keys()
612 elif u == "<bbfile>":
613 if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
614 else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
615 elif u == "<providee>":
616 if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
617 else: allmatches = cooker.status.providers.iterkeys()
618 else: allmatches = [ "(No tab completion available for this command)" ]
619 else: allmatches = [ "(No tab completion available for this command)" ]
620 else:
621 # we are in first argument
622 allmatches = cmds.iterkeys()
623
624 completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
625 #print "completer.matches = '%s'" % completer.matches
626 if len( completer.matches ) > state:
627 return completer.matches[state]
628 else:
629 return None
630
631def debugOut( text ):
632 if debug:
633 sys.stderr.write( "( %s )\n" % text )
634
635def columnize( alist, width = 80 ):
636 """
637 A word-wrap function that preserves existing line breaks
638 and most spaces in the text. Expects that existing line
639 breaks are posix newlines (\n).
640 """
641 return reduce(lambda line, word, width=width: '%s%s%s' %
642 (line,
643 ' \n'[(len(line[line.rfind('\n')+1:])
644 + len(word.split('\n', 1)[0]
645 ) >= width)],
646 word),
647 alist
648 )
649
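# A quick illustration of columnize() above (an illustrative sketch, not part
# of the original file): a newline replaces the separating space once the
# current line plus the next word would reach the requested width, e.g.
#
#   columnize("alpha beta gamma".split(), width=12)
#   # returns 'alpha beta\ngamma'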
650def globfilter( names, pattern ):
651 return fnmatch.filter( names, pattern )
652
653##########################################################################
654# Class MemoryOutput
655##########################################################################
656
657class MemoryOutput:
658 """File-like output class buffering the output of the last 10 commands"""
659 def __init__( self, delegate ):
660 self.delegate = delegate
661 self._buffer = []
662 self.text = []
663 self._command = None
664
665 def startCommand( self, command ):
666 self._command = command
667 self.text = []
668 def endCommand( self ):
669 if self._command is not None:
670 if len( self._buffer ) == 10: del self._buffer[0]
671 self._buffer.append( ( self._command, self.text ) )
672 def removeLast( self ):
673 if self._buffer:
674 del self._buffer[ len( self._buffer ) - 1 ]
675 self.text = []
676 self._command = None
677 def lastBuffer( self ):
678 if self._buffer:
679 return self._buffer[ len( self._buffer ) -1 ][1]
680 def bufferedCommands( self ):
681 return [ cmd for cmd, output in self._buffer ]
682 def buffer( self, i ):
683 if i < len( self._buffer ):
684 return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
685 else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )
686 def write( self, text ):
687 if self._command is not None and text != "BB>> ": self.text.append( text )
688 if self.delegate is not None: self.delegate.write( text )
689 def flush( self ):
690 return self.delegate.flush()
691 def fileno( self ):
692 return self.delegate.fileno()
693 def isatty( self ):
694 return self.delegate.isatty()
695
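# Illustrative usage sketch for MemoryOutput (not part of the original file;
# it assumes the MemoryOutput class above is in scope). Text written while a
# command is active is echoed to the delegate and captured for later replay:
#
#   myout = MemoryOutput( sys.stdout )
#   sys.stdout = myout
#   myout.startCommand( "status" )
#   print("build path = '/tmp'")      # echoed and captured
#   myout.endCommand()
#   sys.stdout = myout.delegate
#   print(myout.buffer( 0 ))          # "BB>> status\nbuild path = '/tmp'\n"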
696##########################################################################
697# Class BitBakeShell
698##########################################################################
699
700class BitBakeShell:
701
702 def __init__( self ):
703 """Register commands and set up readline"""
704 self.commandQ = Queue.Queue()
705 self.commands = BitBakeShellCommands( self )
706 self.myout = MemoryOutput( sys.stdout )
707 self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
708 self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
709
710 readline.set_completer( completer )
711 readline.set_completer_delims( " " )
712 readline.parse_and_bind("tab: complete")
713
714 try:
715 readline.read_history_file( self.historyfilename )
716 except IOError:
717 pass # It doesn't exist yet.
718
719 print(__credits__)
720
721 def cleanup( self ):
722 """Write readline history and clean up resources"""
723 debugOut( "writing command history" )
724 try:
725 readline.write_history_file( self.historyfilename )
726 except:
727 print("SHELL: Unable to save command history")
728
729 def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
730 """Register a command"""
731 if usage == "": usage = command
732 if helptext == "": helptext = function.__doc__ or "<not yet documented>"
733 cmds[command] = ( function, numparams, usage, helptext )
734
735 def processCommand( self, command, params ):
736 """Process a command. Check number of params and print a usage string, if appropriate"""
737 debugOut( "processing command '%s'..." % command )
738 try:
739 function, numparams, usage, helptext = cmds[command]
740 except KeyError:
741 print("SHELL: ERROR: '%s' command is not a valid command." % command)
742 self.myout.removeLast()
743 else:
744 if (numparams != -1) and (not len( params ) == numparams):
745 print("Usage: '%s'" % usage)
746 return
747
748 result = function( self.commands, params )
749 debugOut( "result was '%s'" % result )
750
751 def processStartupFile( self ):
752 """Read and execute all commands found in $HOME/.bbsh_startup"""
753 if os.path.exists( self.startupfilename ):
754 startupfile = open( self.startupfilename, "r" )
755 for cmdline in startupfile:
756 debugOut( "processing startup line '%s'" % cmdline )
757 if not cmdline:
758 continue
759 if "|" in cmdline:
760 print("ERROR: '|' in startup file is not allowed. Ignoring line")
761 continue
762 self.commandQ.put( cmdline.strip() )
763
764 def main( self ):
765 """The main command loop"""
766 while not leave_mainloop:
767 try:
768 if self.commandQ.empty():
769 sys.stdout = self.myout.delegate
770 cmdline = raw_input( "BB>> " )
771 sys.stdout = self.myout
772 else:
773 cmdline = self.commandQ.get()
774 if cmdline:
775 allCommands = cmdline.split( ';' )
776 for command in allCommands:
777 pipecmd = None
778 #
779 # special case for expert mode
780 if command == 'python':
781 sys.stdout = self.myout.delegate
782 self.processCommand( command, "" )
783 sys.stdout = self.myout
784 else:
785 self.myout.startCommand( command )
786 if '|' in command: # disable output
787 command, pipecmd = command.split( '|' )
788 delegate = self.myout.delegate
789 self.myout.delegate = None
790 tokens = shlex.split( command, True )
791 self.processCommand( tokens[0], tokens[1:] or "" )
792 self.myout.endCommand()
793 if pipecmd is not None: # restore output
794 self.myout.delegate = delegate
795
796 pipe = popen2.Popen4( pipecmd )
797 pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
798 pipe.tochild.close()
799 sys.stdout.write( pipe.fromchild.read() )
800 #
801 except EOFError:
802 print()
803 return
804 except KeyboardInterrupt:
805 print()
806
807##########################################################################
808# Start function - called from the BitBake command line utility
809##########################################################################
810
811def start( aCooker ):
812 global cooker
813 cooker = aCooker
814 bbshell = BitBakeShell()
815 bbshell.processStartupFile()
816 bbshell.main()
817 bbshell.cleanup()
818
819if __name__ == "__main__":
820 print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
new file mode 100644
index 0000000000..a6d28597ed
--- /dev/null
+++ b/bitbake/lib/bb/siggen.py
@@ -0,0 +1,483 @@
1import hashlib
2import logging
3import os
4import re
5import tempfile
6import bb.data
7
8logger = logging.getLogger('BitBake.SigGen')
9
10try:
11 import cPickle as pickle
12except ImportError:
13 import pickle
14 logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
15
16def init(d):
17 siggens = [obj for obj in globals().itervalues()
18 if type(obj) is type and issubclass(obj, SignatureGenerator)]
19
20 desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
21 for sg in siggens:
22 if desired == sg.name:
23 return sg(d)
24 break
25 else:
26 logger.error("Invalid signature generator '%s', using default 'noop'\n"
27 "Available generators: %s", desired,
28 ', '.join(obj.name for obj in siggens))
29 return SignatureGenerator(d)
30
31class SignatureGenerator(object):
32 """
33 """
34 name = "noop"
35
36 def __init__(self, data):
37 self.taskhash = {}
38 self.runtaskdeps = {}
39 self.file_checksum_values = {}
40
41 def finalise(self, fn, d, variant):
42 return
43
44 def get_taskhash(self, fn, task, deps, dataCache):
45 return "0"
46
47 def set_taskdata(self, hashes, deps, checksum):
48 return
49
50 def stampfile(self, stampbase, file_name, taskname, extrainfo):
51 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
52
53 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
54 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
55
56 def dump_sigtask(self, fn, task, stampbase, runtime):
57 return
58
59 def invalidate_task(self, task, d, fn):
60 bb.build.del_stamp(task, d, fn)
61
62 def dump_sigs(self, dataCache, options):
63 return
64
65class SignatureGeneratorBasic(SignatureGenerator):
66 """
67 """
68 name = "basic"
69
70 def __init__(self, data):
71 self.basehash = {}
72 self.taskhash = {}
73 self.taskdeps = {}
74 self.runtaskdeps = {}
75 self.file_checksum_values = {}
76 self.gendeps = {}
77 self.lookupcache = {}
78 self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
79 self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
80 self.taskwhitelist = None
81 self.init_rundepcheck(data)
82
83 def init_rundepcheck(self, data):
84 self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
85 if self.taskwhitelist:
86 self.twl = re.compile(self.taskwhitelist)
87 else:
88 self.twl = None
89
90 def _build_data(self, fn, d):
91
92 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
93
94 taskdeps = {}
95 basehash = {}
96
97 for task in tasklist:
98 data = lookupcache[task]
99
100 if data is None:
101 bb.error("Task %s from %s seems to be empty?!" % (task, fn))
102 data = ''
103
104 gendeps[task] -= self.basewhitelist
105 newdeps = gendeps[task]
106 seen = set()
107 while newdeps:
108 nextdeps = newdeps
109 seen |= nextdeps
110 newdeps = set()
111 for dep in nextdeps:
112 if dep in self.basewhitelist:
113 continue
114 gendeps[dep] -= self.basewhitelist
115 newdeps |= gendeps[dep]
116 newdeps -= seen
117
118 alldeps = sorted(seen)
119 for dep in alldeps:
120 data = data + dep
121 var = lookupcache[dep]
122 if var is not None:
123 data = data + str(var)
124 self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
125 taskdeps[task] = alldeps
126
127 self.taskdeps[fn] = taskdeps
128 self.gendeps[fn] = gendeps
129 self.lookupcache[fn] = lookupcache
130
131 return taskdeps
132
133 def finalise(self, fn, d, variant):
134
135 if variant:
136 fn = "virtual:" + variant + ":" + fn
137
138 try:
139 taskdeps = self._build_data(fn, d)
140 except:
141 bb.note("Error during finalise of %s" % fn)
142 raise
143
144 #Slow but can be useful for debugging mismatched basehashes
145 #for task in self.taskdeps[fn]:
146 # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
147
148 for task in taskdeps:
149 d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
150
151 def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
152 # Return True if we should keep the dependency, False to drop it
153 # We only manipulate the dependencies for packages not in the whitelist
154 if self.twl and not self.twl.search(recipename):
155 # then process the actual dependencies
156 if self.twl.search(depname):
157 return False
158 return True
159
160 def read_taint(self, fn, task, stampbase):
161 taint = None
162 try:
163 with open(stampbase + '.' + task + '.taint', 'r') as taintf:
164 taint = taintf.read()
165 except IOError:
166 pass
167 return taint
168
169 def get_taskhash(self, fn, task, deps, dataCache):
170 k = fn + "." + task
171 data = dataCache.basetaskhash[k]
172 self.runtaskdeps[k] = []
173 self.file_checksum_values[k] = {}
174 recipename = dataCache.pkg_fn[fn]
175 for dep in sorted(deps, key=clean_basepath):
176 depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
177 if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
178 continue
179 if dep not in self.taskhash:
180 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
181 data = data + self.taskhash[dep]
182 self.runtaskdeps[k].append(dep)
183
184 if task in dataCache.file_checksums[fn]:
185 checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
186 for (f,cs) in checksums:
187 self.file_checksum_values[k][f] = cs
188 data = data + cs
189
190 taint = self.read_taint(fn, task, dataCache.stamp[fn])
191 if taint:
192 data = data + taint
193
194 h = hashlib.md5(data).hexdigest()
195 self.taskhash[k] = h
196 #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
197 return h
198
199 def set_taskdata(self, hashes, deps, checksums):
200 self.runtaskdeps = deps
201 self.taskhash = hashes
202 self.file_checksum_values = checksums
203
204 def dump_sigtask(self, fn, task, stampbase, runtime):
205 k = fn + "." + task
206 if runtime == "customfile":
207 sigfile = stampbase
208 elif runtime and k in self.taskhash:
209 sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
210 else:
211 sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
212
213 bb.utils.mkdirhier(os.path.dirname(sigfile))
214
215 data = {}
216 data['basewhitelist'] = self.basewhitelist
217 data['taskwhitelist'] = self.taskwhitelist
218 data['taskdeps'] = self.taskdeps[fn][task]
219 data['basehash'] = self.basehash[k]
220 data['gendeps'] = {}
221 data['varvals'] = {}
222 data['varvals'][task] = self.lookupcache[fn][task]
223 for dep in self.taskdeps[fn][task]:
224 if dep in self.basewhitelist:
225 continue
226 data['gendeps'][dep] = self.gendeps[fn][dep]
227 data['varvals'][dep] = self.lookupcache[fn][dep]
228
229 if runtime and k in self.taskhash:
230 data['runtaskdeps'] = self.runtaskdeps[k]
231 data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k].items()]
232 data['runtaskhashes'] = {}
233 for dep in data['runtaskdeps']:
234 data['runtaskhashes'][dep] = self.taskhash[dep]
235
236 taint = self.read_taint(fn, task, stampbase)
237 if taint:
238 data['taint'] = taint
239
240 fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
241 try:
242 with os.fdopen(fd, "wb") as stream:
243 p = pickle.dump(data, stream, -1)
244 stream.flush()
245 os.chmod(tmpfile, 0664)
246 os.rename(tmpfile, sigfile)
247 except (OSError, IOError) as err:
248 try:
249 os.unlink(tmpfile)
250 except OSError:
251 pass
252 raise err
253
254 def dump_sigs(self, dataCache, options):
255 for fn in self.taskdeps:
256 for task in self.taskdeps[fn]:
257 k = fn + "." + task
258 if k not in self.taskhash:
259 continue
260 if dataCache.basetaskhash[k] != self.basehash[k]:
261 bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
262 bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
263 self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
264
265class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
266 name = "basichash"
267
268 def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
269 if taskname != "do_setscene" and taskname.endswith("_setscene"):
270 k = fn + "." + taskname[:-9]
271 else:
272 k = fn + "." + taskname
273 if clean:
274 h = "*"
275 elif k in self.taskhash:
276 h = self.taskhash[k]
277 else:
278 # If k is not in basehash, then error
279 h = self.basehash[k]
280 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
281
282 def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
283 return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
284
285 def invalidate_task(self, task, d, fn):
286 bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
287 bb.build.write_taint(task, d, fn)
288
289def dump_this_task(outfile, d):
290 import bb.parse
291 fn = d.getVar("BB_FILENAME", True)
292 task = "do_" + d.getVar("BB_CURRENTTASK", True)
293 bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
294
295def clean_basepath(a):
296 if a.startswith("virtual:"):
297 b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
298 else:
299 b = a.rsplit("/", 1)[1]
300 return b
301
302def clean_basepaths(a):
303 b = {}
304 for x in a:
305 b[clean_basepath(x)] = a[x]
306 return b
307
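# A quick illustration of clean_basepath() (the paths below are hypothetical):
# the directory portion of a "<recipe file>.<task>" key is dropped while any
# "virtual:<class>:" prefix is kept.
#
#   clean_basepath("/meta/recipes/foo_1.0.bb.do_configure")
#   # -> "foo_1.0.bb.do_configure"
#   clean_basepath("virtual:native:/meta/recipes/foo_1.0.bb.do_configure")
#   # -> "virtual:native:foo_1.0.bb.do_configure"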
308def compare_sigfiles(a, b, recursecb = None):
309 output = []
310
311 p1 = pickle.Unpickler(open(a, "rb"))
312 a_data = p1.load()
313 p2 = pickle.Unpickler(open(b, "rb"))
314 b_data = p2.load()
315
316 def dict_diff(a, b, whitelist=set()):
317 sa = set(a.keys())
318 sb = set(b.keys())
319 common = sa & sb
320 changed = set()
321 for i in common:
322 if a[i] != b[i] and i not in whitelist:
323 changed.add(i)
324 added = sb - sa
325 removed = sa - sb
326 return changed, added, removed
327
328 def file_checksums_diff(a, b):
329 from collections import Counter
330 # Handle old siginfo format
331 if isinstance(a, dict):
332 a = [(os.path.basename(f), cs) for f, cs in a.items()]
333 if isinstance(b, dict):
334 b = [(os.path.basename(f), cs) for f, cs in b.items()]
335 # Compare lists, ensuring we can handle duplicate filenames if they exist
336 removedcount = Counter(a)
337 removedcount.subtract(b)
338 addedcount = Counter(b)
339 addedcount.subtract(a)
340 added = []
341 for x in b:
342 if addedcount[x] > 0:
343 addedcount[x] -= 1
344 added.append(x)
345 removed = []
346 changed = []
347 for x in a:
348 if removedcount[x] > 0:
349 removedcount[x] -= 1
350 for y in added:
351 if y[0] == x[0]:
352 changed.append((x[0], x[1], y[1]))
353 added.remove(y)
354 break
355 else:
356 removed.append(x)
357 added = [x[0] for x in added]
358 removed = [x[0] for x in removed]
359 return changed, added, removed
360
361 if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
362 output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
363 if a_data['basewhitelist'] and b_data['basewhitelist']:
364 output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
365
366 if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
367 output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
368 if a_data['taskwhitelist'] and b_data['taskwhitelist']:
369 output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
370
371 if a_data['taskdeps'] != b_data['taskdeps']:
372 output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
373
374 if a_data['basehash'] != b_data['basehash']:
375 output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
376
377 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
378 if changed:
379 for dep in changed:
380 output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
381 if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
382 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
383 if added:
384 for dep in added:
385 output.append("Dependency on variable %s was added" % (dep))
386 if removed:
387 for dep in removed:
388 output.append("Dependency on Variable %s was removed" % (dep))
389
390
391 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
392 if changed:
393 for dep in changed:
394 output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
395
396 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
397 if changed:
398 for f, old, new in changed:
399 output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
400 if added:
401 for f in added:
402 output.append("Dependency on checksum of file %s was added" % (f))
403 if removed:
404 for f in removed:
405 output.append("Dependency on checksum of file %s was removed" % (f))
406
407
408 if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
409 a = a_data['runtaskhashes']
410 b = b_data['runtaskhashes']
411 changed, added, removed = dict_diff(a, b)
412 if added:
413 for dep in added:
414 bdep_found = False
415 if removed:
416 for bdep in removed:
417 if b[dep] == a[bdep]:
418 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
419 bdep_found = True
420 if not bdep_found:
421 output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
422 if removed:
423 for dep in removed:
424 adep_found = False
425 if added:
426 for adep in added:
427 if b[adep] == a[dep]:
428 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
429 adep_found = True
430 if not adep_found:
431 output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
432 if changed:
433 for dep in changed:
434 output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
435 if callable(recursecb):
436 # If a dependent hash changed, might as well print the line above and then defer to the changes in
437 # that hash since in all likelihood, they're the same changes this task also saw.
438 recout = recursecb(dep, a[dep], b[dep])
439 if recout:
440 output = [output[-1]] + recout
441
442 a_taint = a_data.get('taint', None)
443 b_taint = b_data.get('taint', None)
444 if a_taint != b_taint:
445 output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))
446
447 return output
448
449
450def dump_sigfile(a):
451 output = []
452
453 p1 = pickle.Unpickler(open(a, "rb"))
454 a_data = p1.load()
455
456 output.append("basewhitelist: %s" % (a_data['basewhitelist']))
457
458 output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
459
460 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
461
462 output.append("basehash: %s" % (a_data['basehash']))
463
464 for dep in a_data['gendeps']:
465 output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
466
467 for dep in a_data['varvals']:
468 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
469
470 if 'runtaskdeps' in a_data:
471 output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
472
473 if 'file_checksum_values' in a_data:
474 output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
475
476 if 'runtaskhashes' in a_data:
477 for dep in a_data['runtaskhashes']:
478 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
479
480 if 'taint' in a_data:
481 output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
482
483 return output
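Conceptually, SignatureGeneratorBasic.get_taskhash() composes a task hash by concatenating the task's base hash (an md5 over the values of the variables the task depends on, see _build_data()) with the hashes of the tasks it depends on, any file checksums and an optional taint, then hashing the result again. A simplified standalone sketch of that composition follows; the inputs are hypothetical and, unlike the real code, dependencies are not sorted by their cleaned base path:

    import hashlib

    def example_taskhash(basehash, dep_hashes, file_checksums, taint=None):
        # basehash: md5 of the task's variable dependencies
        # dep_hashes: {dependency task key: task hash}
        # file_checksums: [(filename, checksum)] pairs
        data = basehash
        for dep in sorted(dep_hashes):
            data += dep_hashes[dep]
        for filename, checksum in file_checksums:
            data += checksum
        if taint:
            data += taint
        return hashlib.md5(data.encode('utf-8')).hexdigest()

    print(example_taskhash("0" * 32,
                           {"foo_1.0.bb.do_compile": "1" * 32},
                           [("hello.patch", "2" * 32)]))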
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
new file mode 100644
index 0000000000..af72a1fb09
--- /dev/null
+++ b/bitbake/lib/bb/taskdata.py
@@ -0,0 +1,651 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'TaskData' implementation
6
7Task data collection and handling
8
9"""
10
11# Copyright (C) 2006 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import logging
27import re
28import bb
29
30logger = logging.getLogger("BitBake.TaskData")
31
32def re_match_strings(target, strings):
33 """
34 Return whether the string 'target' matches any of the given
35 strings, each of which may be a regular expression.
36 """
37 return any(name == target or re.match(name, target)
38 for name in strings)
39
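# For example (illustrative only): re_match_strings("glibc-locale", ["glibc.*"])
# is True because each entry may be a regular expression, while
# re_match_strings("glibc", ["bash"]) is False.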
40class TaskData:
41 """
42 BitBake Task Data implementation
43 """
44 def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
45 self.build_names_index = []
46 self.run_names_index = []
47 self.fn_index = []
48
49 self.build_targets = {}
50 self.run_targets = {}
51
52 self.external_targets = []
53
54 self.tasks_fnid = []
55 self.tasks_name = []
56 self.tasks_tdepends = []
57 self.tasks_idepends = []
58 self.tasks_irdepends = []
59 # Cache to speed up task ID lookups
60 self.tasks_lookup = {}
61
62 self.depids = {}
63 self.rdepids = {}
64
65 self.consider_msgs_cache = []
66
67 self.failed_deps = []
68 self.failed_rdeps = []
69 self.failed_fnids = []
70
71 self.abort = abort
72 self.tryaltconfigs = tryaltconfigs
73
74 self.skiplist = skiplist
75
76 def getbuild_id(self, name):
77 """
78 Return an ID number for the build target name.
79 If it doesn't exist, create one.
80 """
81 if not name in self.build_names_index:
82 self.build_names_index.append(name)
83 return len(self.build_names_index) - 1
84
85 return self.build_names_index.index(name)
86
87 def getrun_id(self, name):
88 """
89 Return an ID number for the run target name.
90 If it doesn't exist, create one.
91 """
92 if not name in self.run_names_index:
93 self.run_names_index.append(name)
94 return len(self.run_names_index) - 1
95
96 return self.run_names_index.index(name)
97
98 def getfn_id(self, name):
99 """
100 Return an ID number for the filename.
101 If it doesn't exist, create one.
102 """
103 if not name in self.fn_index:
104 self.fn_index.append(name)
105 return len(self.fn_index) - 1
106
107 return self.fn_index.index(name)
108
109 def gettask_ids(self, fnid):
110 """
111 Return an array of the ID numbers matching a given fnid.
112 """
113 ids = []
114 if fnid in self.tasks_lookup:
115 for task in self.tasks_lookup[fnid]:
116 ids.append(self.tasks_lookup[fnid][task])
117 return ids
118
119 def gettask_id_fromfnid(self, fnid, task):
120 """
121 Return an ID number for the task matching fnid and task.
122 """
123 if fnid in self.tasks_lookup:
124 if task in self.tasks_lookup[fnid]:
125 return self.tasks_lookup[fnid][task]
126
127 return None
128
129 def gettask_id(self, fn, task, create = True):
130 """
131 Return an ID number for the task matching fn and task.
132 If it doesn't exist, create one by default.
133 Optionally return None instead.
134 """
135 fnid = self.getfn_id(fn)
136
137 if fnid in self.tasks_lookup:
138 if task in self.tasks_lookup[fnid]:
139 return self.tasks_lookup[fnid][task]
140
141 if not create:
142 return None
143
144 self.tasks_name.append(task)
145 self.tasks_fnid.append(fnid)
146 self.tasks_tdepends.append([])
147 self.tasks_idepends.append([])
148 self.tasks_irdepends.append([])
149
150 listid = len(self.tasks_name) - 1
151
152 if fnid not in self.tasks_lookup:
153 self.tasks_lookup[fnid] = {}
154 self.tasks_lookup[fnid][task] = listid
155
156 return listid
157
158 def add_tasks(self, fn, dataCache):
159 """
160 Add tasks for a given fn to the database
161 """
162
163 task_deps = dataCache.task_deps[fn]
164
165 fnid = self.getfn_id(fn)
166
167 if fnid in self.failed_fnids:
168 bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")
169
170 # Check if we've already seen this fn
171 if fnid in self.tasks_fnid:
172 return
173
174 for task in task_deps['tasks']:
175
176 # Work out task dependencies
177 parentids = []
178 for dep in task_deps['parents'][task]:
179 if dep not in task_deps['tasks']:
180 bb.debug(2, "Not adding dependeny of %s on %s since %s does not exist" % (task, dep, dep))
181 continue
182 parentid = self.gettask_id(fn, dep)
183 parentids.append(parentid)
184 taskid = self.gettask_id(fn, task)
185 self.tasks_tdepends[taskid].extend(parentids)
186
187 # Touch all intertask dependencies
188 if 'depends' in task_deps and task in task_deps['depends']:
189 ids = []
190 for dep in task_deps['depends'][task].split():
191 if dep:
192 if ":" not in dep:
193 bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
194 ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
195 self.tasks_idepends[taskid].extend(ids)
196 if 'rdepends' in task_deps and task in task_deps['rdepends']:
197 ids = []
198 for dep in task_deps['rdepends'][task].split():
199 if dep:
200 if ":" not in dep:
201 bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
202 ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
203 self.tasks_irdepends[taskid].extend(ids)
204
205
206 # Work out build dependencies
207 if not fnid in self.depids:
208 dependids = {}
209 for depend in dataCache.deps[fn]:
210 dependids[self.getbuild_id(depend)] = None
211 self.depids[fnid] = dependids.keys()
212 logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
213
214 # Work out runtime dependencies
215 if not fnid in self.rdepids:
216 rdependids = {}
217 rdepends = dataCache.rundeps[fn]
218 rrecs = dataCache.runrecs[fn]
219 rdependlist = []
220 rreclist = []
221 for package in rdepends:
222 for rdepend in rdepends[package]:
223 rdependlist.append(rdepend)
224 rdependids[self.getrun_id(rdepend)] = None
225 for package in rrecs:
226 for rdepend in rrecs[package]:
227 rreclist.append(rdepend)
228 rdependids[self.getrun_id(rdepend)] = None
229 if rdependlist:
230 logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
231 if rreclist:
232 logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
233 self.rdepids[fnid] = rdependids.keys()
234
235 for dep in self.depids[fnid]:
236 if dep in self.failed_deps:
237 self.fail_fnid(fnid)
238 return
239 for dep in self.rdepids[fnid]:
240 if dep in self.failed_rdeps:
241 self.fail_fnid(fnid)
242 return
243
244 def have_build_target(self, target):
245 """
246 Have we a build target matching this name?
247 """
248 targetid = self.getbuild_id(target)
249
250 if targetid in self.build_targets:
251 return True
252 return False
253
254 def have_runtime_target(self, target):
255 """
256 Have we a runtime target matching this name?
257 """
258 targetid = self.getrun_id(target)
259
260 if targetid in self.run_targets:
261 return True
262 return False
263
264 def add_build_target(self, fn, item):
265 """
266 Add a build target.
267 If already present, append the provider fn to the list
268 """
269 targetid = self.getbuild_id(item)
270 fnid = self.getfn_id(fn)
271
272 if targetid in self.build_targets:
273 if fnid in self.build_targets[targetid]:
274 return
275 self.build_targets[targetid].append(fnid)
276 return
277 self.build_targets[targetid] = [fnid]
278
279 def add_runtime_target(self, fn, item):
280 """
281 Add a runtime target.
282 If already present, append the provider fn to the list
283 """
284 targetid = self.getrun_id(item)
285 fnid = self.getfn_id(fn)
286
287 if targetid in self.run_targets:
288 if fnid in self.run_targets[targetid]:
289 return
290 self.run_targets[targetid].append(fnid)
291 return
292 self.run_targets[targetid] = [fnid]
293
294 def mark_external_target(self, item):
295 """
296 Mark a build target as being externally requested
297 """
298 targetid = self.getbuild_id(item)
299
300 if targetid not in self.external_targets:
301 self.external_targets.append(targetid)
302
303 def get_unresolved_build_targets(self, dataCache):
304 """
305 Return a list of build targets whose providers
306 are unknown.
307 """
308 unresolved = []
309 for target in self.build_names_index:
310 if re_match_strings(target, dataCache.ignored_dependencies):
311 continue
312 if self.build_names_index.index(target) in self.failed_deps:
313 continue
314 if not self.have_build_target(target):
315 unresolved.append(target)
316 return unresolved
317
318 def get_unresolved_run_targets(self, dataCache):
319 """
320 Return a list of runtime targets whose providers
321 are unknown.
322 """
323 unresolved = []
324 for target in self.run_names_index:
325 if re_match_strings(target, dataCache.ignored_dependencies):
326 continue
327 if self.run_names_index.index(target) in self.failed_rdeps:
328 continue
329 if not self.have_runtime_target(target):
330 unresolved.append(target)
331 return unresolved
332
333 def get_provider(self, item):
334 """
335 Return a list of providers of item
336 """
337 targetid = self.getbuild_id(item)
338
339 return self.build_targets[targetid]
340
341 def get_dependees(self, itemid):
342 """
343 Return a list of targets which depend on item
344 """
345 dependees = []
346 for fnid in self.depids:
347 if itemid in self.depids[fnid]:
348 dependees.append(fnid)
349 return dependees
350
351 def get_dependees_str(self, item):
352 """
353 Return a list of targets which depend on item as a user readable string
354 """
355 itemid = self.getbuild_id(item)
356 dependees = []
357 for fnid in self.depids:
358 if itemid in self.depids[fnid]:
359 dependees.append(self.fn_index[fnid])
360 return dependees
361
362 def get_rdependees(self, itemid):
363 """
364 Return a list of targets which depend on runtime item
365 """
366 dependees = []
367 for fnid in self.rdepids:
368 if itemid in self.rdepids[fnid]:
369 dependees.append(fnid)
370 return dependees
371
372 def get_rdependees_str(self, item):
373 """
374 Return a list of targets which depend on runtime item as a user readable string
375 """
376 itemid = self.getrun_id(item)
377 dependees = []
378 for fnid in self.rdepids:
379 if itemid in self.rdepids[fnid]:
380 dependees.append(self.fn_index[fnid])
381 return dependees
382
383 def get_reasons(self, item, runtime=False):
384 """
385 Get the reason(s) for an item not being provided, if any
386 """
387 reasons = []
388 if self.skiplist:
389 for fn in self.skiplist:
390 skipitem = self.skiplist[fn]
391 if skipitem.pn == item:
392 reasons.append("%s was skipped: %s" % (skipitem.pn, skipitem.skipreason))
393 elif runtime and item in skipitem.rprovides:
394 reasons.append("%s RPROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
395 elif not runtime and item in skipitem.provides:
396 reasons.append("%s PROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
397 return reasons
398
399 def get_close_matches(self, item, provider_list):
400 import difflib
401 if self.skiplist:
402 skipped = []
403 for fn in self.skiplist:
404 skipped.append(self.skiplist[fn].pn)
405 full_list = provider_list + skipped
406 else:
407 full_list = provider_list
408 return difflib.get_close_matches(item, full_list, cutoff=0.7)
409
410 def add_provider(self, cfgData, dataCache, item):
411 try:
412 self.add_provider_internal(cfgData, dataCache, item)
413 except bb.providers.NoProvider:
414 if self.abort:
415 raise
416 self.remove_buildtarget(self.getbuild_id(item))
417
418 self.mark_external_target(item)
419
420 def add_provider_internal(self, cfgData, dataCache, item):
421 """
422 Add the providers of item to the task data
423 Mark entries that were specifically added externally, as opposed to dependencies
424 added internally during dependency resolution
425 """
426
427 if re_match_strings(item, dataCache.ignored_dependencies):
428 return
429
430 if not item in dataCache.providers:
431 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=self.get_close_matches(item, dataCache.providers.keys())), cfgData)
432 raise bb.providers.NoProvider(item)
433
434 if self.have_build_target(item):
435 return
436
437 all_p = dataCache.providers[item]
438
439 eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
440 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
441
442 if not eligible:
443 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
444 raise bb.providers.NoProvider(item)
445
446 if len(eligible) > 1 and foundUnique == False:
447 if item not in self.consider_msgs_cache:
448 providers_list = []
449 for fn in eligible:
450 providers_list.append(dataCache.pkg_fn[fn])
451 bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
452 self.consider_msgs_cache.append(item)
453
454 for fn in eligible:
455 fnid = self.getfn_id(fn)
456 if fnid in self.failed_fnids:
457 continue
458 logger.debug(2, "adding %s to satisfy %s", fn, item)
459 self.add_build_target(fn, item)
460 self.add_tasks(fn, dataCache)
461
462
463 #item = dataCache.pkg_fn[fn]
464
465 def add_rprovider(self, cfgData, dataCache, item):
466 """
467 Add the runtime providers of item to the task data
468 (takes item names from RDEPENDS/PACKAGES namespace)
469 """
470
471 if re_match_strings(item, dataCache.ignored_dependencies):
472 return
473
474 if self.have_runtime_target(item):
475 return
476
477 all_p = bb.providers.getRuntimeProviders(dataCache, item)
478
479 if not all_p:
480 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
481 raise bb.providers.NoRProvider(item)
482
483 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
484 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
485
486 if not eligible:
487 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
488 raise bb.providers.NoRProvider(item)
489
490 if len(eligible) > 1 and numberPreferred == 0:
491 if item not in self.consider_msgs_cache:
492 providers_list = []
493 for fn in eligible:
494 providers_list.append(dataCache.pkg_fn[fn])
495 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
496 self.consider_msgs_cache.append(item)
497
498 if numberPreferred > 1:
499 if item not in self.consider_msgs_cache:
500 providers_list = []
501 for fn in eligible:
502 providers_list.append(dataCache.pkg_fn[fn])
503 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
504 self.consider_msgs_cache.append(item)
505 raise bb.providers.MultipleRProvider(item)
506
507 # run through the list until we find one that we can build
508 for fn in eligible:
509 fnid = self.getfn_id(fn)
510 if fnid in self.failed_fnids:
511 continue
512 logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
513 self.add_runtime_target(fn, item)
514 self.add_tasks(fn, dataCache)
515
516 def fail_fnid(self, fnid, missing_list = []):
517 """
518 Mark a file as failed (unbuildable)
519 Remove any references from build and runtime provider lists
520
521 missing_list: a list of missing requirements for this target
522 """
523 if fnid in self.failed_fnids:
524 return
525 logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
526 self.failed_fnids.append(fnid)
527 for target in self.build_targets:
528 if fnid in self.build_targets[target]:
529 self.build_targets[target].remove(fnid)
530 if len(self.build_targets[target]) == 0:
531 self.remove_buildtarget(target, missing_list)
532 for target in self.run_targets:
533 if fnid in self.run_targets[target]:
534 self.run_targets[target].remove(fnid)
535 if len(self.run_targets[target]) == 0:
536 self.remove_runtarget(target, missing_list)
537
538 def remove_buildtarget(self, targetid, missing_list = []):
539 """
540 Mark a build target as failed (unbuildable)
541 Trigger removal of any files that have this as a dependency
542 """
543 if not missing_list:
544 missing_list = [self.build_names_index[targetid]]
545 else:
546 missing_list = [self.build_names_index[targetid]] + missing_list
547 logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
548 self.failed_deps.append(targetid)
549 dependees = self.get_dependees(targetid)
550 for fnid in dependees:
551 self.fail_fnid(fnid, missing_list)
552 for taskid in xrange(len(self.tasks_idepends)):
553 idepends = self.tasks_idepends[taskid]
554 for (idependid, idependtask) in idepends:
555 if idependid == targetid:
556 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
557
558 if self.abort and targetid in self.external_targets:
559 target = self.build_names_index[targetid]
560 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
561 raise bb.providers.NoProvider(target)
562
563 def remove_runtarget(self, targetid, missing_list = []):
564 """
565 Mark a run target as failed (unbuildable)
566 Trigger removal of any files that have this as a dependency
567 """
568 if not missing_list:
569 missing_list = [self.run_names_index[targetid]]
570 else:
571 missing_list = [self.run_names_index[targetid]] + missing_list
572
573 logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
574 self.failed_rdeps.append(targetid)
575 dependees = self.get_rdependees(targetid)
576 for fnid in dependees:
577 self.fail_fnid(fnid, missing_list)
578 for taskid in xrange(len(self.tasks_irdepends)):
579 irdepends = self.tasks_irdepends[taskid]
580 for (idependid, idependtask) in irdepends:
581 if idependid == targetid:
582 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
583
584 def add_unresolved(self, cfgData, dataCache):
585 """
586 Resolve all unresolved build and runtime targets
587 """
588 logger.info("Resolving any missing task queue dependencies")
589 while True:
590 added = 0
591 for target in self.get_unresolved_build_targets(dataCache):
592 try:
593 self.add_provider_internal(cfgData, dataCache, target)
594 added = added + 1
595 except bb.providers.NoProvider:
596 targetid = self.getbuild_id(target)
597 if self.abort and targetid in self.external_targets:
598 raise
599 self.remove_buildtarget(targetid)
600 for target in self.get_unresolved_run_targets(dataCache):
601 try:
602 self.add_rprovider(cfgData, dataCache, target)
603 added = added + 1
604 except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
605 self.remove_runtarget(self.getrun_id(target))
606 logger.debug(1, "Resolved " + str(added) + " extra dependencies")
607 if added == 0:
608 break
609 # self.dump_data()
610
611 def dump_data(self):
612 """
613 Dump some debug information on the internal data structures
614 """
615 logger.debug(3, "build_names:")
616 logger.debug(3, ", ".join(self.build_names_index))
617
618 logger.debug(3, "run_names:")
619 logger.debug(3, ", ".join(self.run_names_index))
620
621 logger.debug(3, "build_targets:")
622 for buildid in xrange(len(self.build_names_index)):
623 target = self.build_names_index[buildid]
624 targets = "None"
625 if buildid in self.build_targets:
626 targets = self.build_targets[buildid]
627 logger.debug(3, " (%s)%s: %s", buildid, target, targets)
628
629 logger.debug(3, "run_targets:")
630 for runid in xrange(len(self.run_names_index)):
631 target = self.run_names_index[runid]
632 targets = "None"
633 if runid in self.run_targets:
634 targets = self.run_targets[runid]
635 logger.debug(3, " (%s)%s: %s", runid, target, targets)
636
637 logger.debug(3, "tasks:")
638 for task in xrange(len(self.tasks_name)):
639 logger.debug(3, " (%s)%s - %s: %s",
640 task,
641 self.fn_index[self.tasks_fnid[task]],
642 self.tasks_name[task],
643 self.tasks_tdepends[task])
644
645 logger.debug(3, "dependency ids (per fn):")
646 for fnid in self.depids:
647 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
648
649 logger.debug(3, "runtime dependency ids (per fn):")
650 for fnid in self.rdepids:
651 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
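TaskData keeps parallel name lists for build targets, runtime targets and filenames, and refers to each entry by its integer position; getbuild_id(), getrun_id() and getfn_id() all follow the same append-or-look-up pattern. A minimal standalone sketch of that scheme (the class and target names below are hypothetical):

    class NameIndex:
        """Intern names into a list and hand out their list positions as IDs."""
        def __init__(self):
            self.names = []

        def get_id(self, name):
            # Same pattern as TaskData.getbuild_id()/getrun_id()/getfn_id():
            # append unseen names, otherwise return the existing position.
            if name not in self.names:
                self.names.append(name)
                return len(self.names) - 1
            return self.names.index(name)

    index = NameIndex()
    print(index.get_id("busybox"))   # 0
    print(index.get_id("zlib"))      # 1
    print(index.get_id("busybox"))   # 0 - repeated lookups return the same ID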
diff --git a/bitbake/lib/bb/tests/__init__.py b/bitbake/lib/bb/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/__init__.py
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
new file mode 100644
index 0000000000..4454bc51ed
--- /dev/null
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -0,0 +1,375 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Test for codeparser.py
5#
6# Copyright (C) 2010 Chris Larson
7# Copyright (C) 2012 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21#
22
23import unittest
24import logging
25import bb
26
27logger = logging.getLogger('BitBake.TestCodeParser')
28
29# bb.data references bb.parse but can't directly import due to circular dependencies.
30# Hack around it for now :(
31import bb.parse
32import bb.data
33
34class ReferenceTest(unittest.TestCase):
35 def setUp(self):
36 self.d = bb.data.init()
37
38 def setEmptyVars(self, varlist):
39 for k in varlist:
40 self.d.setVar(k, "")
41
42 def setValues(self, values):
43 for k, v in values.items():
44 self.d.setVar(k, v)
45
46 def assertReferences(self, refs):
47 self.assertEqual(self.references, refs)
48
49 def assertExecs(self, execs):
50 self.assertEqual(self.execs, execs)
51
52class VariableReferenceTest(ReferenceTest):
53
54 def parseExpression(self, exp):
55 parsedvar = self.d.expandWithRefs(exp, None)
56 self.references = parsedvar.references
57
58 def test_simple_reference(self):
59 self.setEmptyVars(["FOO"])
60 self.parseExpression("${FOO}")
61 self.assertReferences(set(["FOO"]))
62
63 def test_nested_reference(self):
64 self.setEmptyVars(["BAR"])
65 self.d.setVar("FOO", "BAR")
66 self.parseExpression("${${FOO}}")
67 self.assertReferences(set(["FOO", "BAR"]))
68
69 def test_python_reference(self):
70 self.setEmptyVars(["BAR"])
71 self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
72 self.assertReferences(set(["BAR"]))
73
74class ShellReferenceTest(ReferenceTest):
75
76 def parseExpression(self, exp):
77 parsedvar = self.d.expandWithRefs(exp, None)
78 parser = bb.codeparser.ShellParser("ParserTest", logger)
79 parser.parse_shell(parsedvar.value)
80
81 self.references = parsedvar.references
82 self.execs = parser.execs
83
84 def test_quotes_inside_assign(self):
85 self.parseExpression('foo=foo"bar"baz')
86 self.assertReferences(set([]))
87
88 def test_quotes_inside_arg(self):
89 self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
90 self.assertExecs(set(["sed"]))
91
92 def test_arg_continuation(self):
93 self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
94 self.assertExecs(set(["sed"]))
95
96 def test_dollar_in_quoted(self):
97 self.parseExpression('sed -i -e "foo$" *.pc')
98 self.assertExecs(set(["sed"]))
99
100 def test_quotes_inside_arg_continuation(self):
101 self.setEmptyVars(["bindir", "D", "libdir"])
102 self.parseExpression("""
103sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
104-e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
105${D}${libdir}/pkgconfig/*.pc
106""")
107 self.assertReferences(set(["bindir", "D", "libdir"]))
108
109 def test_assign_subshell_expansion(self):
110 self.parseExpression("foo=$(echo bar)")
111 self.assertExecs(set(["echo"]))
112
113 def test_shell_unexpanded(self):
114 self.setEmptyVars(["QT_BASE_NAME"])
115 self.parseExpression('echo "${QT_BASE_NAME}"')
116 self.assertExecs(set(["echo"]))
117 self.assertReferences(set(["QT_BASE_NAME"]))
118
119 def test_incomplete_varexp_single_quotes(self):
120 self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
121 self.assertExecs(set(["sed"]))
122
123
124 def test_until(self):
125 self.parseExpression("until false; do echo true; done")
126 self.assertExecs(set(["false", "echo"]))
127 self.assertReferences(set())
128
129 def test_case(self):
130 self.parseExpression("""
131case $foo in
132*)
133bar
134;;
135esac
136""")
137 self.assertExecs(set(["bar"]))
138 self.assertReferences(set())
139
140 def test_assign_exec(self):
141 self.parseExpression("a=b c='foo bar' alpha 1 2 3")
142 self.assertExecs(set(["alpha"]))
143
144 def test_redirect_to_file(self):
145 self.setEmptyVars(["foo"])
146 self.parseExpression("echo foo >${foo}/bar")
147 self.assertExecs(set(["echo"]))
148 self.assertReferences(set(["foo"]))
149
150 def test_heredoc(self):
151 self.setEmptyVars(["theta"])
152 self.parseExpression("""
153cat <<END
154alpha
155beta
156${theta}
157END
158""")
159 self.assertReferences(set(["theta"]))
160
161 def test_redirect_from_heredoc(self):
162 v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
163 self.setEmptyVars(v)
164 self.parseExpression("""
165cat <<END >${B}/cachedpaths
166shadow_cv_maildir=${SHADOW_MAILDIR}
167shadow_cv_mailfile=${SHADOW_MAILFILE}
168shadow_cv_utmpdir=${SHADOW_UTMPDIR}
169shadow_cv_logdir=${SHADOW_LOGDIR}
170shadow_cv_passwd_dir=${bindir}
171END
172""")
173 self.assertReferences(set(v))
174 self.assertExecs(set(["cat"]))
175
176# def test_incomplete_command_expansion(self):
177# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
178# bbvalue.shparse("cp foo`", self.d), self.d)
179
180# def test_rogue_dollarsign(self):
181# self.setValues({"D" : "/tmp"})
182# self.parseExpression("install -d ${D}$")
183# self.assertReferences(set(["D"]))
184# self.assertExecs(set(["install"]))
185
186
187class PythonReferenceTest(ReferenceTest):
188
189 def setUp(self):
190 self.d = bb.data.init()
191 if hasattr(bb.utils, "_context"):
192 self.context = bb.utils._context
193 else:
194 import __builtin__
195 self.context = __builtin__.__dict__
196
197 def parseExpression(self, exp):
198 parsedvar = self.d.expandWithRefs(exp, None)
199 parser = bb.codeparser.PythonParser("ParserTest", logger)
200 parser.parse_python(parsedvar.value)
201
202 self.references = parsedvar.references | parser.references
203 self.execs = parser.execs
204
205 @staticmethod
206 def indent(value):
207 """Python Snippets have to be indented, python values don't have to
208be. These unit tests are testing snippets."""
209 return " " + value
210
211 def test_getvar_reference(self):
212 self.parseExpression("bb.data.getVar('foo', d, True)")
213 self.assertReferences(set(["foo"]))
214 self.assertExecs(set())
215
216 def test_getvar_computed_reference(self):
217 self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
218 self.assertReferences(set())
219 self.assertExecs(set())
220
221 def test_getvar_exec_reference(self):
222 self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
223 self.assertReferences(set())
224 self.assertExecs(set(["eval"]))
225
226 def test_var_reference(self):
227 self.context["foo"] = lambda x: x
228 self.setEmptyVars(["FOO"])
229 self.parseExpression("foo('${FOO}')")
230 self.assertReferences(set(["FOO"]))
231 self.assertExecs(set(["foo"]))
232 del self.context["foo"]
233
234 def test_var_exec(self):
235 for etype in ("func", "task"):
236 self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
237 self.d.setVarFlag("do_something", etype, True)
238 self.parseExpression("bb.build.exec_func('do_something', d)")
239 self.assertReferences(set([]))
240 self.assertExecs(set(["do_something"]))
241
242 def test_function_reference(self):
243 self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
244 self.d.setVar("FOO", "Hello, World!")
245 self.parseExpression("testfunc('${FOO}')")
246 self.assertReferences(set(["FOO"]))
247 self.assertExecs(set(["testfunc"]))
248 del self.context["testfunc"]
249
250 def test_qualified_function_reference(self):
251 self.parseExpression("time.time()")
252 self.assertExecs(set(["time.time"]))
253
254 def test_qualified_function_reference_2(self):
255 self.parseExpression("os.path.dirname('/foo/bar')")
256 self.assertExecs(set(["os.path.dirname"]))
257
258 def test_qualified_function_reference_nested(self):
259 self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
260 self.assertExecs(set(["time.strftime", "time.gmtime"]))
261
262 def test_function_reference_chained(self):
263 self.context["testget"] = lambda: "\tstrip me "
264 self.parseExpression("testget().strip()")
265 self.assertExecs(set(["testget"]))
266 del self.context["testget"]
267
268
269class DependencyReferenceTest(ReferenceTest):
270
271 pydata = """
272bb.data.getVar('somevar', d, True)
273def test(d):
274 foo = 'bar %s' % 'foo'
275def test2(d):
276 d.getVar(foo, True)
277 d.getVar('bar', False)
278 test2(d)
279
280def a():
281 \"\"\"some
282 stuff
283 \"\"\"
284 return "heh"
285
286test(d)
287
288bb.data.expand(bb.data.getVar("something", False, d), d)
289bb.data.expand("${inexpand} somethingelse", d)
290bb.data.getVar(a(), d, False)
291"""
292
293 def test_python(self):
294 self.d.setVar("FOO", self.pydata)
295 self.setEmptyVars(["inexpand", "a", "test2", "test"])
296 self.d.setVarFlags("FOO", {"func": True, "python": True})
297
298 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
299
300 self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
301
302
303 shelldata = """
304foo () {
305bar
306}
307{
308echo baz
309$(heh)
310eval `moo`
311}
312a=b
313c=d
314(
315true && false
316test -f foo
317testval=something
318$testval
319) || aiee
320! inverted
321echo ${somevar}
322
323case foo in
324bar)
325echo bar
326;;
327baz)
328echo baz
329;;
330foo*)
331echo foo
332;;
333esac
334"""
335
336 def test_shell(self):
337 execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
338 self.d.setVar("somevar", "heh")
339 self.d.setVar("inverted", "echo inverted...")
340 self.d.setVarFlag("inverted", "func", True)
341 self.d.setVar("FOO", self.shelldata)
342 self.d.setVarFlags("FOO", {"func": True})
343 self.setEmptyVars(execs)
344
345 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
346
347 self.assertEquals(deps, set(["somevar", "inverted"] + execs))
348
349
350 def test_vardeps(self):
351 self.d.setVar("oe_libinstall", "echo test")
352 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
353 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
354
355 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
356
357 self.assertEquals(deps, set(["oe_libinstall"]))
358
359 def test_vardeps_expand(self):
360 self.d.setVar("oe_libinstall", "echo test")
361 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
362 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
363
364 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
365
366 self.assertEquals(deps, set(["oe_libinstall"]))
367
368 #Currently no wildcard support
369 #def test_vardeps_wildcards(self):
370 # self.d.setVar("oe_libinstall", "echo test")
371 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
372 # self.d.setVarFlag("FOO", "vardeps", "oe_*")
373 # self.assertEquals(deps, set(["oe_libinstall"]))
374
375
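For orientation, the reference-tracking pattern that the codeparser tests above exercise can be reduced to a few lines. This is an illustrative sketch only, assuming a working BitBake environment where the bb modules are importable; the "example" name and the plain logging.Logger stand in for the module-level logger the tests pass in.

import logging
import bb.data
import bb.codeparser

logger = logging.getLogger("BitBake")   # assumption: any Logger works where the tests pass theirs

d = bb.data.init()
parsedvar = d.expandWithRefs('echo "${QT_BASE_NAME}"', None)   # same call the tests make

parser = bb.codeparser.ShellParser("example", logger)
parser.parse_shell(parsedvar.value)

print(parsedvar.references)   # variables referenced by the expression, here QT_BASE_NAME
print(parser.execs)           # commands the shell fragment would execute, here echo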
diff --git a/bitbake/lib/bb/tests/cow.py b/bitbake/lib/bb/tests/cow.py
new file mode 100644
index 0000000000..35c5841f32
--- /dev/null
+++ b/bitbake/lib/bb/tests/cow.py
@@ -0,0 +1,136 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for Copy-on-Write (cow.py)
5#
6# Copyright 2006 Holger Freyther <freyther@handhelds.org>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import os
24
25class COWTestCase(unittest.TestCase):
26 """
27 Test case for the COW module from mithro
28 """
29
30 def testGetSet(self):
31 """
 32 Test get and set
33 """
34 from bb.COW import COWDictBase
35 a = COWDictBase.copy()
36
37 self.assertEquals(False, a.has_key('a'))
38
39 a['a'] = 'a'
40 a['b'] = 'b'
41 self.assertEquals(True, a.has_key('a'))
42 self.assertEquals(True, a.has_key('b'))
43 self.assertEquals('a', a['a'] )
44 self.assertEquals('b', a['b'] )
45
46 def testCopyCopy(self):
47 """
48 Test the copy of copies
49 """
50
51 from bb.COW import COWDictBase
52
53 # create two COW dict 'instances'
54 b = COWDictBase.copy()
55 c = COWDictBase.copy()
56
57 # assign some keys to one instance, some keys to another
58 b['a'] = 10
59 b['c'] = 20
60 c['a'] = 30
61
62 # test separation of the two instances
63 self.assertEquals(False, c.has_key('c'))
64 self.assertEquals(30, c['a'])
65 self.assertEquals(10, b['a'])
66
67 # test copy
68 b_2 = b.copy()
69 c_2 = c.copy()
70
71 self.assertEquals(False, c_2.has_key('c'))
72 self.assertEquals(10, b_2['a'])
73
74 b_2['d'] = 40
75 self.assertEquals(False, c_2.has_key('d'))
76 self.assertEquals(True, b_2.has_key('d'))
77 self.assertEquals(40, b_2['d'])
78 self.assertEquals(False, b.has_key('d'))
79 self.assertEquals(False, c.has_key('d'))
80
81 c_2['d'] = 30
82 self.assertEquals(True, c_2.has_key('d'))
83 self.assertEquals(True, b_2.has_key('d'))
84 self.assertEquals(30, c_2['d'])
85 self.assertEquals(40, b_2['d'])
86 self.assertEquals(False, b.has_key('d'))
87 self.assertEquals(False, c.has_key('d'))
88
89 # test copy of the copy
90 c_3 = c_2.copy()
91 b_3 = b_2.copy()
92 b_3_2 = b_2.copy()
93
94 c_3['e'] = 4711
95 self.assertEquals(4711, c_3['e'])
96 self.assertEquals(False, c_2.has_key('e'))
97 self.assertEquals(False, b_3.has_key('e'))
98 self.assertEquals(False, b_3_2.has_key('e'))
99 self.assertEquals(False, b_2.has_key('e'))
100
101 b_3['e'] = 'viel'
102 self.assertEquals('viel', b_3['e'])
103 self.assertEquals(4711, c_3['e'])
104 self.assertEquals(False, c_2.has_key('e'))
105 self.assertEquals(True, b_3.has_key('e'))
106 self.assertEquals(False, b_3_2.has_key('e'))
107 self.assertEquals(False, b_2.has_key('e'))
108
109 def testCow(self):
110 from bb.COW import COWDictBase
111 c = COWDictBase.copy()
112 c['123'] = 1027
113 c['other'] = 4711
114 c['d'] = { 'abc' : 10, 'bcd' : 20 }
115
116 copy = c.copy()
117
118 self.assertEquals(1027, c['123'])
119 self.assertEquals(4711, c['other'])
120 self.assertEquals({'abc':10, 'bcd':20}, c['d'])
121 self.assertEquals(1027, copy['123'])
122 self.assertEquals(4711, copy['other'])
123 self.assertEquals({'abc':10, 'bcd':20}, copy['d'])
124
125 # cow it now
126 copy['123'] = 1028
127 copy['other'] = 4712
128 copy['d']['abc'] = 20
129
130
131 self.assertEquals(1027, c['123'])
132 self.assertEquals(4711, c['other'])
133 self.assertEquals({'abc':10, 'bcd':20}, c['d'])
134 self.assertEquals(1028, copy['123'])
135 self.assertEquals(4712, copy['other'])
136 self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
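The copy-on-write semantics pinned down by the cow.py tests above are easiest to see in isolation. A minimal sketch, assuming bb.COW is importable as in the tests:

from bb.COW import COWDictBase

base = COWDictBase.copy()        # a fresh COW dictionary
base['flag'] = 'original'

child = base.copy()              # cheap copy; data is shared until written to
child['flag'] = 'changed'        # the write only affects the child
child['extra'] = 1               # new keys stay local to the child

print(base['flag'])              # 'original' - the parent is untouched
print(child['flag'])             # 'changed'
print(base.has_key('extra'))     # False - keys added to a child never appear in the parent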
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
new file mode 100644
index 0000000000..ee66b22e25
--- /dev/null
+++ b/bitbake/lib/bb/tests/data.py
@@ -0,0 +1,296 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for the Data Store (data.py/data_smart.py)
5#
6# Copyright (C) 2010 Chris Larson
7# Copyright (C) 2012 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21#
22
23import unittest
24import bb
25import bb.data
26import bb.parse
27
28class DataExpansions(unittest.TestCase):
29 def setUp(self):
30 self.d = bb.data.init()
31 self.d["foo"] = "value_of_foo"
32 self.d["bar"] = "value_of_bar"
33 self.d["value_of_foo"] = "value_of_'value_of_foo'"
34
35 def test_one_var(self):
36 val = self.d.expand("${foo}")
37 self.assertEqual(str(val), "value_of_foo")
38
39 def test_indirect_one_var(self):
40 val = self.d.expand("${${foo}}")
41 self.assertEqual(str(val), "value_of_'value_of_foo'")
42
43 def test_indirect_and_another(self):
44 val = self.d.expand("${${foo}} ${bar}")
45 self.assertEqual(str(val), "value_of_'value_of_foo' value_of_bar")
46
47 def test_python_snippet(self):
48 val = self.d.expand("${@5*12}")
49 self.assertEqual(str(val), "60")
50
51 def test_expand_in_python_snippet(self):
52 val = self.d.expand("${@'boo ' + '${foo}'}")
53 self.assertEqual(str(val), "boo value_of_foo")
54
55 def test_python_snippet_getvar(self):
56 val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
57 self.assertEqual(str(val), "value_of_foo value_of_bar")
58
59 def test_python_snippet_syntax_error(self):
60 self.d.setVar("FOO", "${@foo = 5}")
61 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
62
63 def test_python_snippet_runtime_error(self):
64 self.d.setVar("FOO", "${@int('test')}")
65 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
66
67 def test_python_snippet_error_path(self):
68 self.d.setVar("FOO", "foo value ${BAR}")
69 self.d.setVar("BAR", "bar value ${@int('test')}")
70 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
71
72 def test_value_containing_value(self):
73 val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
74 self.assertEqual(str(val), "value_of_foo value_of_bar")
75
76 def test_reference_undefined_var(self):
77 val = self.d.expand("${undefinedvar} meh")
78 self.assertEqual(str(val), "${undefinedvar} meh")
79
80 def test_double_reference(self):
81 self.d.setVar("BAR", "bar value")
82 self.d.setVar("FOO", "${BAR} foo ${BAR}")
83 val = self.d.getVar("FOO", True)
84 self.assertEqual(str(val), "bar value foo bar value")
85
86 def test_direct_recursion(self):
87 self.d.setVar("FOO", "${FOO}")
88 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
89
90 def test_indirect_recursion(self):
91 self.d.setVar("FOO", "${BAR}")
92 self.d.setVar("BAR", "${BAZ}")
93 self.d.setVar("BAZ", "${FOO}")
94 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
95
96 def test_recursion_exception(self):
97 self.d.setVar("FOO", "${BAR}")
98 self.d.setVar("BAR", "${${@'FOO'}}")
99 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
100
101 def test_incomplete_varexp_single_quotes(self):
102 self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
103 val = self.d.getVar("FOO", True)
104 self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
105
106 def test_nonstring(self):
107 self.d.setVar("TEST", 5)
108 val = self.d.getVar("TEST", True)
109 self.assertEqual(str(val), "5")
110
111 def test_rename(self):
112 self.d.renameVar("foo", "newfoo")
113 self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
114 self.assertEqual(self.d.getVar("foo"), None)
115
116 def test_deletion(self):
117 self.d.delVar("foo")
118 self.assertEqual(self.d.getVar("foo"), None)
119
120 def test_keys(self):
121 keys = self.d.keys()
122 self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
123
124class TestNestedExpansions(unittest.TestCase):
125 def setUp(self):
126 self.d = bb.data.init()
127 self.d["foo"] = "foo"
128 self.d["bar"] = "bar"
129 self.d["value_of_foobar"] = "187"
130
131 def test_refs(self):
132 val = self.d.expand("${value_of_${foo}${bar}}")
133 self.assertEqual(str(val), "187")
134
135 #def test_python_refs(self):
136 # val = self.d.expand("${@${@3}**2 + ${@4}**2}")
137 # self.assertEqual(str(val), "25")
138
139 def test_ref_in_python_ref(self):
140 val = self.d.expand("${@'${foo}' + 'bar'}")
141 self.assertEqual(str(val), "foobar")
142
143 def test_python_ref_in_ref(self):
144 val = self.d.expand("${${@'f'+'o'+'o'}}")
145 self.assertEqual(str(val), "foo")
146
147 def test_deep_nesting(self):
148 depth = 100
149 val = self.d.expand("${" * depth + "foo" + "}" * depth)
150 self.assertEqual(str(val), "foo")
151
152 #def test_deep_python_nesting(self):
153 # depth = 50
154 # val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
155 # self.assertEqual(str(val), str(depth + 1))
156
157 def test_mixed(self):
158 val = self.d.expand("${value_of_${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
159 self.assertEqual(str(val), "187")
160
161 def test_runtime(self):
162 val = self.d.expand("${${@'value_of' + '_f'+'o'+'o'+'b'+'a'+'r'}}")
163 self.assertEqual(str(val), "187")
164
165class TestMemoize(unittest.TestCase):
166 def test_memoized(self):
167 d = bb.data.init()
168 d.setVar("FOO", "bar")
169 self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))
170
171 def test_not_memoized(self):
172 d1 = bb.data.init()
173 d2 = bb.data.init()
174 d1.setVar("FOO", "bar")
175 d2.setVar("FOO", "bar2")
176 self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))
177
178 def test_changed_after_memoized(self):
179 d = bb.data.init()
180 d.setVar("foo", "value of foo")
181 self.assertEqual(str(d.getVar("foo")), "value of foo")
182 d.setVar("foo", "second value of foo")
183 self.assertEqual(str(d.getVar("foo")), "second value of foo")
184
185 def test_same_value(self):
186 d = bb.data.init()
187 d.setVar("foo", "value of")
188 d.setVar("bar", "value of")
189 self.assertEqual(d.getVar("foo"),
190 d.getVar("bar"))
191
192class TestConcat(unittest.TestCase):
193 def setUp(self):
194 self.d = bb.data.init()
195 self.d.setVar("FOO", "foo")
196 self.d.setVar("VAL", "val")
197 self.d.setVar("BAR", "bar")
198
199 def test_prepend(self):
200 self.d.setVar("TEST", "${VAL}")
201 self.d.prependVar("TEST", "${FOO}:")
202 self.assertEqual(self.d.getVar("TEST", True), "foo:val")
203
204 def test_append(self):
205 self.d.setVar("TEST", "${VAL}")
206 self.d.appendVar("TEST", ":${BAR}")
207 self.assertEqual(self.d.getVar("TEST", True), "val:bar")
208
209 def test_multiple_append(self):
210 self.d.setVar("TEST", "${VAL}")
211 self.d.prependVar("TEST", "${FOO}:")
212 self.d.appendVar("TEST", ":val2")
213 self.d.appendVar("TEST", ":${BAR}")
214 self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
215
216class TestConcatOverride(unittest.TestCase):
217 def setUp(self):
218 self.d = bb.data.init()
219 self.d.setVar("FOO", "foo")
220 self.d.setVar("VAL", "val")
221 self.d.setVar("BAR", "bar")
222
223 def test_prepend(self):
224 self.d.setVar("TEST", "${VAL}")
225 self.d.setVar("TEST_prepend", "${FOO}:")
226 bb.data.update_data(self.d)
227 self.assertEqual(self.d.getVar("TEST", True), "foo:val")
228
229 def test_append(self):
230 self.d.setVar("TEST", "${VAL}")
231 self.d.setVar("TEST_append", ":${BAR}")
232 bb.data.update_data(self.d)
233 self.assertEqual(self.d.getVar("TEST", True), "val:bar")
234
235 def test_multiple_append(self):
236 self.d.setVar("TEST", "${VAL}")
237 self.d.setVar("TEST_prepend", "${FOO}:")
238 self.d.setVar("TEST_append", ":val2")
239 self.d.setVar("TEST_append", ":${BAR}")
240 bb.data.update_data(self.d)
241 self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
242
243 def test_remove(self):
244 self.d.setVar("TEST", "${VAL} ${BAR}")
245 self.d.setVar("TEST_remove", "val")
246 bb.data.update_data(self.d)
247 self.assertEqual(self.d.getVar("TEST", True), "bar")
248
249 def test_doubleref_remove(self):
250 self.d.setVar("TEST", "${VAL} ${BAR}")
251 self.d.setVar("TEST_remove", "val")
252 self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
253 bb.data.update_data(self.d)
254 self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar")
255
256
257class TestOverrides(unittest.TestCase):
258 def setUp(self):
259 self.d = bb.data.init()
260 self.d.setVar("OVERRIDES", "foo:bar:local")
261 self.d.setVar("TEST", "testvalue")
262
263 def test_no_override(self):
264 bb.data.update_data(self.d)
265 self.assertEqual(self.d.getVar("TEST", True), "testvalue")
266
267 def test_one_override(self):
268 self.d.setVar("TEST_bar", "testvalue2")
269 bb.data.update_data(self.d)
270 self.assertEqual(self.d.getVar("TEST", True), "testvalue2")
271
272 def test_multiple_override(self):
273 self.d.setVar("TEST_bar", "testvalue2")
274 self.d.setVar("TEST_local", "testvalue3")
275 self.d.setVar("TEST_foo", "testvalue4")
276 bb.data.update_data(self.d)
277 self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
278
279
280class TestFlags(unittest.TestCase):
281 def setUp(self):
282 self.d = bb.data.init()
283 self.d.setVar("foo", "value of foo")
284 self.d.setVarFlag("foo", "flag1", "value of flag1")
285 self.d.setVarFlag("foo", "flag2", "value of flag2")
286
287 def test_setflag(self):
288 self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
289 self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")
290
291 def test_delflag(self):
292 self.d.delVarFlag("foo", "flag2")
293 self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
294 self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)
295
296
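The data-store behaviour covered above follows one flow: set variables, apply appends and overrides with bb.data.update_data(), then read the finalised value. A minimal sketch mirroring TestConcatOverride.test_append, assuming a BitBake environment:

import bb.data

d = bb.data.init()
d.setVar("VAL", "val")
d.setVar("BAR", "bar")
d.setVar("TEST", "${VAL}")
d.setVar("TEST_append", ":${BAR}")   # queued until the data is finalised

bb.data.update_data(d)               # applies _append/_prepend/_remove and OVERRIDES

print(d.getVar("TEST", True))        # "val:bar", as asserted in the test above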
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
new file mode 100644
index 0000000000..7df7a0ef51
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -0,0 +1,562 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for the Fetcher (fetch2/)
5#
6# Copyright (C) 2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import tempfile
24import subprocess
25import os
26from bb.fetch2 import URI
27import bb
28
29class URITest(unittest.TestCase):
30 test_uris = {
31 "http://www.google.com/index.html" : {
32 'uri': 'http://www.google.com/index.html',
33 'scheme': 'http',
34 'hostname': 'www.google.com',
35 'port': None,
36 'hostport': 'www.google.com',
37 'path': '/index.html',
38 'userinfo': '',
39 'username': '',
40 'password': '',
41 'params': {},
42 'query': {},
43 'relative': False
44 },
45 "http://www.google.com/index.html;param1=value1" : {
46 'uri': 'http://www.google.com/index.html;param1=value1',
47 'scheme': 'http',
48 'hostname': 'www.google.com',
49 'port': None,
50 'hostport': 'www.google.com',
51 'path': '/index.html',
52 'userinfo': '',
53 'username': '',
54 'password': '',
55 'params': {
56 'param1': 'value1'
57 },
58 'query': {},
59 'relative': False
60 },
61 "http://www.example.org/index.html?param1=value1" : {
62 'uri': 'http://www.example.org/index.html?param1=value1',
63 'scheme': 'http',
64 'hostname': 'www.example.org',
65 'port': None,
66 'hostport': 'www.example.org',
67 'path': '/index.html',
68 'userinfo': '',
69 'username': '',
70 'password': '',
71 'params': {},
72 'query': {
73 'param1': 'value1'
74 },
75 'relative': False
76 },
77 "http://www.example.org/index.html?qparam1=qvalue1;param2=value2" : {
78 'uri': 'http://www.example.org/index.html?qparam1=qvalue1;param2=value2',
79 'scheme': 'http',
80 'hostname': 'www.example.org',
81 'port': None,
82 'hostport': 'www.example.org',
83 'path': '/index.html',
84 'userinfo': '',
85 'username': '',
86 'password': '',
87 'params': {
88 'param2': 'value2'
89 },
90 'query': {
91 'qparam1': 'qvalue1'
92 },
93 'relative': False
94 },
95 "http://www.example.com:8080/index.html" : {
96 'uri': 'http://www.example.com:8080/index.html',
97 'scheme': 'http',
98 'hostname': 'www.example.com',
99 'port': 8080,
100 'hostport': 'www.example.com:8080',
101 'path': '/index.html',
102 'userinfo': '',
103 'username': '',
104 'password': '',
105 'params': {},
106 'query': {},
107 'relative': False
108 },
109 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : {
110 'uri': 'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg',
111 'scheme': 'cvs',
112 'hostname': 'cvs.handhelds.org',
113 'port': None,
114 'hostport': 'cvs.handhelds.org',
115 'path': '/cvs',
116 'userinfo': 'anoncvs',
117 'username': 'anoncvs',
118 'password': '',
119 'params': {
120 'module': 'familiar/dist/ipkg'
121 },
122 'query': {},
123 'relative': False
124 },
125 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg": {
126 'uri': 'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg',
127 'scheme': 'cvs',
128 'hostname': 'cvs.handhelds.org',
129 'port': None,
130 'hostport': 'cvs.handhelds.org',
131 'path': '/cvs',
132 'userinfo': 'anoncvs:anonymous',
133 'username': 'anoncvs',
134 'password': 'anonymous',
135 'params': {
136 'tag': 'V0-99-81',
137 'module': 'familiar/dist/ipkg'
138 },
139 'query': {},
140 'relative': False
141 },
142 "file://example.diff": { # NOTE: Not RFC compliant!
143 'uri': 'file:example.diff',
144 'scheme': 'file',
145 'hostname': '',
146 'port': None,
147 'hostport': '',
148 'path': 'example.diff',
149 'userinfo': '',
150 'username': '',
151 'password': '',
152 'params': {},
153 'query': {},
154 'relative': True
155 },
156 "file:example.diff": { # NOTE: RFC compliant version of the former
157 'uri': 'file:example.diff',
158 'scheme': 'file',
159 'hostname': '',
160 'port': None,
161 'hostport': '',
162 'path': 'example.diff',
163 'userinfo': '',
164 'userinfo': '',
165 'username': '',
166 'password': '',
167 'params': {},
168 'query': {},
169 'relative': True
170 },
171 "file:///tmp/example.diff": {
172 'uri': 'file:///tmp/example.diff',
173 'scheme': 'file',
174 'hostname': '',
175 'port': None,
176 'hostport': '',
177 'path': '/tmp/example.diff',
178 'userinfo': '',
179 'userinfo': '',
180 'username': '',
181 'password': '',
182 'params': {},
183 'query': {},
184 'relative': False
185 },
186 "git:///path/example.git": {
187 'uri': 'git:///path/example.git',
188 'scheme': 'git',
189 'hostname': '',
190 'port': None,
191 'hostport': '',
192 'path': '/path/example.git',
193 'userinfo': '',
194 'userinfo': '',
195 'username': '',
196 'password': '',
197 'params': {},
198 'query': {},
199 'relative': False
200 },
201 "git:path/example.git": {
202 'uri': 'git:path/example.git',
203 'scheme': 'git',
204 'hostname': '',
205 'port': None,
206 'hostport': '',
207 'path': 'path/example.git',
208 'userinfo': '',
209 'userinfo': '',
210 'username': '',
211 'password': '',
212 'params': {},
213 'query': {},
214 'relative': True
215 },
216 "git://example.net/path/example.git": {
217 'uri': 'git://example.net/path/example.git',
218 'scheme': 'git',
219 'hostname': 'example.net',
220 'port': None,
221 'hostport': 'example.net',
222 'path': '/path/example.git',
223 'userinfo': '',
224 'userinfo': '',
225 'username': '',
226 'password': '',
227 'params': {},
228 'query': {},
229 'relative': False
230 }
231 }
232
233 def test_uri(self):
234 for test_uri, ref in self.test_uris.items():
235 uri = URI(test_uri)
236
237 self.assertEqual(str(uri), ref['uri'])
238
239 # expected attributes
240 self.assertEqual(uri.scheme, ref['scheme'])
241
242 self.assertEqual(uri.userinfo, ref['userinfo'])
243 self.assertEqual(uri.username, ref['username'])
244 self.assertEqual(uri.password, ref['password'])
245
246 self.assertEqual(uri.hostname, ref['hostname'])
247 self.assertEqual(uri.port, ref['port'])
248 self.assertEqual(uri.hostport, ref['hostport'])
249
250 self.assertEqual(uri.path, ref['path'])
251 self.assertEqual(uri.params, ref['params'])
252
253 self.assertEqual(uri.relative, ref['relative'])
254
255 def test_dict(self):
256 for test in self.test_uris.values():
257 uri = URI()
258
259 self.assertEqual(uri.scheme, '')
260 self.assertEqual(uri.userinfo, '')
261 self.assertEqual(uri.username, '')
262 self.assertEqual(uri.password, '')
263 self.assertEqual(uri.hostname, '')
264 self.assertEqual(uri.port, None)
265 self.assertEqual(uri.path, '')
266 self.assertEqual(uri.params, {})
267
268
269 uri.scheme = test['scheme']
270 self.assertEqual(uri.scheme, test['scheme'])
271
272 uri.userinfo = test['userinfo']
273 self.assertEqual(uri.userinfo, test['userinfo'])
274 self.assertEqual(uri.username, test['username'])
275 self.assertEqual(uri.password, test['password'])
276
277 # make sure changing the values doesn't do anything unexpected
278 uri.username = 'changeme'
279 self.assertEqual(uri.username, 'changeme')
280 self.assertEqual(uri.password, test['password'])
281 uri.password = 'insecure'
282 self.assertEqual(uri.username, 'changeme')
283 self.assertEqual(uri.password, 'insecure')
284
285 # reset back after our trickery
286 uri.userinfo = test['userinfo']
287 self.assertEqual(uri.userinfo, test['userinfo'])
288 self.assertEqual(uri.username, test['username'])
289 self.assertEqual(uri.password, test['password'])
290
291 uri.hostname = test['hostname']
292 self.assertEqual(uri.hostname, test['hostname'])
293 self.assertEqual(uri.hostport, test['hostname'])
294
295 uri.port = test['port']
296 self.assertEqual(uri.port, test['port'])
297 self.assertEqual(uri.hostport, test['hostport'])
298
299 uri.path = test['path']
300 self.assertEqual(uri.path, test['path'])
301
302 uri.params = test['params']
303 self.assertEqual(uri.params, test['params'])
304
305 uri.query = test['query']
306 self.assertEqual(uri.query, test['query'])
307
308 self.assertEqual(str(uri), test['uri'])
309
310 uri.params = {}
311 self.assertEqual(uri.params, {})
312 self.assertEqual(str(uri), (str(uri).split(";"))[0])
313
314class FetcherTest(unittest.TestCase):
315
316 def setUp(self):
317 self.d = bb.data.init()
318 self.tempdir = tempfile.mkdtemp()
319 self.dldir = os.path.join(self.tempdir, "download")
320 os.mkdir(self.dldir)
321 self.d.setVar("DL_DIR", self.dldir)
322 self.unpackdir = os.path.join(self.tempdir, "unpacked")
323 os.mkdir(self.unpackdir)
324 persistdir = os.path.join(self.tempdir, "persistdata")
325 self.d.setVar("PERSISTENT_DIR", persistdir)
326
327 def tearDown(self):
328 bb.utils.prunedir(self.tempdir)
329
330class MirrorUriTest(FetcherTest):
331
332 replaceuris = {
333 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
334 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
335 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
336 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
337 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
338 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
339 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
340 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
341 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
342 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
343 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
344 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
345 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
346 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
347 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
348 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
349 ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
350 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
351 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
352 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
353 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
354 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
355 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
356 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
357 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
358 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
359 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
360 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
361
362 #Renaming files doesn't work
363 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
364 #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
365 }
366
367 mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
368 "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
369 "https://.*/.* file:///someotherpath/downloads/ \n" \
370 "http://.*/.* file:///someotherpath/downloads/ \n"
371
372 def test_urireplace(self):
373 for k, v in self.replaceuris.items():
374 ud = bb.fetch.FetchData(k[0], self.d)
375 ud.setup_localpath(self.d)
376 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
377 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
378 self.assertEqual([v], newuris)
379
380 def test_urilist1(self):
381 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
382 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
383 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
384 self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz', 'file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
385
386 def test_urilist2(self):
387 # Catch https:// -> files:// bug
388 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
389 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
390 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
391 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
392
393
394class FetcherLocalTest(FetcherTest):
395 def setUp(self):
396 def touch(fn):
397 with file(fn, 'a'):
398 os.utime(fn, None)
399
400 super(FetcherLocalTest, self).setUp()
401 self.localsrcdir = os.path.join(self.tempdir, 'localsrc')
402 os.makedirs(self.localsrcdir)
403 touch(os.path.join(self.localsrcdir, 'a'))
404 touch(os.path.join(self.localsrcdir, 'b'))
405 os.makedirs(os.path.join(self.localsrcdir, 'dir'))
406 touch(os.path.join(self.localsrcdir, 'dir', 'c'))
407 touch(os.path.join(self.localsrcdir, 'dir', 'd'))
408 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
409 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
410 self.d.setVar("FILESPATH", self.localsrcdir)
411
412 def fetchUnpack(self, uris):
413 fetcher = bb.fetch.Fetch(uris, self.d)
414 fetcher.download()
415 fetcher.unpack(self.unpackdir)
416 flst = []
417 for root, dirs, files in os.walk(self.unpackdir):
418 for f in files:
419 flst.append(os.path.relpath(os.path.join(root, f), self.unpackdir))
420 flst.sort()
421 return flst
422
423 def test_local(self):
424 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
425 self.assertEqual(tree, ['a', 'dir/c'])
426
427 def test_local_wildcard(self):
428 tree = self.fetchUnpack(['file://a', 'file://dir/*'])
429 # FIXME: this is broken - it should return ['a', 'dir/c', 'dir/d', 'dir/subdir/e']
430 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6128
431 self.assertEqual(tree, ['a', 'b', 'dir/c', 'dir/d', 'dir/subdir/e'])
432
433 def test_local_dir(self):
434 tree = self.fetchUnpack(['file://a', 'file://dir'])
435 self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
436
437 def test_local_subdir(self):
438 tree = self.fetchUnpack(['file://dir/subdir'])
439 # FIXME: this is broken - it should return ['dir/subdir/e']
440 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6129
441 self.assertEqual(tree, ['subdir/e'])
442
443 def test_local_subdir_file(self):
444 tree = self.fetchUnpack(['file://dir/subdir/e'])
445 self.assertEqual(tree, ['dir/subdir/e'])
446
447
448class FetcherNetworkTest(FetcherTest):
449
450 if os.environ.get("BB_SKIP_NETTESTS") == "yes":
451 print("Unset BB_SKIP_NETTESTS to run network tests")
452 else:
453 def test_fetch(self):
454 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
455 fetcher.download()
456 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
457 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
458 self.d.setVar("BB_NO_NETWORK", "1")
459 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
460 fetcher.download()
461 fetcher.unpack(self.unpackdir)
462 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
463 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)
464
465 def test_fetch_mirror(self):
466 self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
467 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
468 fetcher.download()
469 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
470
471 def test_fetch_premirror(self):
472 self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
473 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
474 fetcher.download()
475 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
476
477 def gitfetcher(self, url1, url2):
478 def checkrevision(self, fetcher):
479 fetcher.unpack(self.unpackdir)
480 revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip()
481 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
482
483 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
484 self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
485 fetcher = bb.fetch.Fetch([url1], self.d)
486 fetcher.download()
487 checkrevision(self, fetcher)
488 # Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
489 bb.utils.prunedir(self.dldir + "/git2/")
490 bb.utils.prunedir(self.unpackdir)
491 self.d.setVar("BB_NO_NETWORK", "1")
492 fetcher = bb.fetch.Fetch([url2], self.d)
493 fetcher.download()
494 checkrevision(self, fetcher)
495
496 def test_gitfetch(self):
497 url1 = url2 = "git://git.openembedded.org/bitbake"
498 self.gitfetcher(url1, url2)
499
500 def test_gitfetch_goodsrcrev(self):
501 # SRCREV is set but matches rev= parameter
502 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
503 self.gitfetcher(url1, url2)
504
505 def test_gitfetch_badsrcrev(self):
506 # SRCREV is set but does not match rev= parameter
507 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5"
508 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
509
510 def test_gitfetch_tagandrev(self):
511 # Specifying both rev= and tag= parameters in the URL should raise an error
512 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
513 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
514
515 def test_gitfetch_premirror(self):
516 url1 = "git://git.openembedded.org/bitbake"
517 url2 = "git://someserver.org/bitbake"
518 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
519 self.gitfetcher(url1, url2)
520
521 def test_gitfetch_premirror2(self):
522 url1 = url2 = "git://someserver.org/bitbake"
523 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
524 self.gitfetcher(url1, url2)
525
526 def test_gitfetch_premirror3(self):
527 realurl = "git://git.openembedded.org/bitbake"
528 dummyurl = "git://someserver.org/bitbake"
529 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
530 os.chdir(self.tempdir)
531 bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
532 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
533 self.gitfetcher(dummyurl, dummyurl)
534
535 def test_git_submodule(self):
536 fetcher = bb.fetch.Fetch(["gitsm://git.yoctoproject.org/git-submodule-test;rev=f12e57f2edf0aa534cf1616fa983d165a92b0842"], self.d)
537 fetcher.download()
538 # Previous cwd has been deleted
539 os.chdir(os.path.dirname(self.unpackdir))
540 fetcher.unpack(self.unpackdir)
541
542class URLHandle(unittest.TestCase):
543
544 datatable = {
545 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
546 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
547 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
548 "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'})
549 }
550
551 def test_decodeurl(self):
552 for k, v in self.datatable.items():
553 result = bb.fetch.decodeurl(k)
554 self.assertEqual(result, v)
555
556 def test_encodeurl(self):
557 for k, v in self.datatable.items():
558 result = bb.fetch.encodeurl(v)
559 self.assertEqual(result, k)
560
561
562
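The mirror tests above all funnel through the same two calls: mirror_from_string() to parse a MIRRORS-style string and build_mirroruris() to produce the candidate URIs. An illustrative sketch, assuming a BitBake environment; it uses bb.fetch2.FetchData directly where the tests go through the bb.fetch alias, and DL_DIR only needs to point at a writable directory as in FetcherTest.setUp:

import tempfile
import bb.data
import bb.fetch2

d = bb.data.init()
d.setVar("DL_DIR", tempfile.mkdtemp())     # the fetcher needs somewhere for downloads

url = "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"
ud = bb.fetch2.FetchData(url, d)

mirrors = bb.fetch2.mirror_from_string("http://.*/.* file:///somepath/downloads/ \n")
uris, uds = bb.fetch2.build_mirroruris(ud, mirrors, d)

print(uris)   # ['file:///somepath/downloads/bitbake-1.0.tar.gz']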
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
new file mode 100644
index 0000000000..7c50b1d786
--- /dev/null
+++ b/bitbake/lib/bb/tests/utils.py
@@ -0,0 +1,53 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for utils.py
5#
6# Copyright (C) 2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import bb
24
25class VerCmpString(unittest.TestCase):
26
27 def test_vercmpstring(self):
28 result = bb.utils.vercmp_string('1', '2')
29 self.assertTrue(result < 0)
30 result = bb.utils.vercmp_string('2', '1')
31 self.assertTrue(result > 0)
32 result = bb.utils.vercmp_string('1', '1.0')
33 self.assertTrue(result < 0)
34 result = bb.utils.vercmp_string('1', '1.1')
35 self.assertTrue(result < 0)
36 result = bb.utils.vercmp_string('1.1', '1_p2')
37 self.assertTrue(result < 0)
38
39 def test_explode_dep_versions(self):
40 correctresult = {"foo" : ["= 1.10"]}
41 result = bb.utils.explode_dep_versions2("foo (= 1.10)")
42 self.assertEqual(result, correctresult)
43 result = bb.utils.explode_dep_versions2("foo (=1.10)")
44 self.assertEqual(result, correctresult)
45 result = bb.utils.explode_dep_versions2("foo ( = 1.10)")
46 self.assertEqual(result, correctresult)
47 result = bb.utils.explode_dep_versions2("foo ( =1.10)")
48 self.assertEqual(result, correctresult)
49 result = bb.utils.explode_dep_versions2("foo ( = 1.10 )")
50 self.assertEqual(result, correctresult)
51 result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
52 self.assertEqual(result, correctresult)
53
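Both helpers exercised above can be called directly; a minimal sketch assuming bb.utils is importable:

import bb.utils

# vercmp_string behaves like cmp(): negative, zero or positive
print(bb.utils.vercmp_string('1', '1.1'))    # negative, '1' sorts before '1.1'
print(bb.utils.vercmp_string('2', '1'))      # positive

# explode_dep_versions2 normalises the various "(= 1.10)" spellings into one form
print(bb.utils.explode_dep_versions2("foo ( = 1.10 )"))   # {'foo': ['= 1.10']}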
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
new file mode 100644
index 0000000000..751a2d7a23
--- /dev/null
+++ b/bitbake/lib/bb/tinfoil.py
@@ -0,0 +1,96 @@
1# tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities
2#
3# Copyright (C) 2012 Intel Corporation
4# Copyright (C) 2011 Mentor Graphics Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import logging
20import warnings
21import os
22import sys
23
24import bb.cache
25import bb.cooker
26import bb.providers
27import bb.utils
28from bb.cooker import state, BBCooker
29from bb.cookerdata import CookerConfiguration, ConfigParameters
30import bb.fetch2
31
32class Tinfoil:
33 def __init__(self, output=sys.stdout):
34 # Needed to avoid deprecation warnings with python 2.6
35 warnings.filterwarnings("ignore", category=DeprecationWarning)
36
37 # Set up logging
38 self.logger = logging.getLogger('BitBake')
39 console = logging.StreamHandler(output)
40 bb.msg.addDefaultlogFilter(console)
41 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
42 if output.isatty():
43 format.enable_color()
44 console.setFormatter(format)
45 self.logger.addHandler(console)
46
47 self.config = CookerConfiguration()
48 configparams = TinfoilConfigParameters(parse_only=True)
49 self.config.setConfigParameters(configparams)
50 self.config.setServerRegIdleCallback(self.register_idle_function)
51 self.cooker = BBCooker(self.config)
52 self.config_data = self.cooker.data
53 bb.providers.logger.setLevel(logging.ERROR)
54 self.cooker_data = None
55
56 def register_idle_function(self, function, data):
57 pass
58
59 def parseRecipes(self):
60 sys.stderr.write("Parsing recipes..")
61 self.logger.setLevel(logging.WARNING)
62
63 try:
64 while self.cooker.state in (state.initial, state.parsing):
65 self.cooker.updateCache()
66 except KeyboardInterrupt:
67 self.cooker.shutdown()
68 self.cooker.updateCache()
69 sys.exit(2)
70
71 self.logger.setLevel(logging.INFO)
72 sys.stderr.write("done.\n")
73
74 self.cooker_data = self.cooker.recipecache
75
76 def prepare(self, config_only = False):
77 if not self.cooker_data:
78 if config_only:
79 self.cooker.parseConfiguration()
80 self.cooker_data = self.cooker.recipecache
81 else:
82 self.parseRecipes()
83
84class TinfoilConfigParameters(ConfigParameters):
85
86 def __init__(self, **options):
87 self.initial_options = options
88 super(TinfoilConfigParameters, self).__init__()
89
90 def parseCommandLine(self):
91 class DummyOptions:
92 def __init__(self, initial_options):
93 for key, val in initial_options.items():
94 setattr(self, key, val)
95
96 return DummyOptions(self.initial_options), None
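Tinfoil is meant to be driven by command-line utilities; the usual sequence is construct, prepare, then read from config_data. A hedged sketch, assuming it is run from an initialised build directory so the cooker can locate the configuration files, and with MACHINE as an arbitrary example variable:

import bb.tinfoil

tinfoil = bb.tinfoil.Tinfoil()
tinfoil.prepare(config_only=True)              # parse only the configuration, not every recipe
print(tinfoil.config_data.getVar("MACHINE", True))

# calling prepare() with no arguments instead parses all recipes and
# populates tinfoil.cooker_data with the resulting recipe cache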
diff --git a/bitbake/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
new file mode 100644
index 0000000000..a4805ed028
--- /dev/null
+++ b/bitbake/lib/bb/ui/__init__.py
@@ -0,0 +1,17 @@
1#
2# BitBake UI Implementation
3#
4# Copyright (C) 2006-2007 Richard Purdie
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
new file mode 100644
index 0000000000..69655709d3
--- /dev/null
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -0,0 +1,964 @@
1#
2# BitBake ToasterUI Implementation
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import datetime
20import sys
21import bb
22import re
23import ast
24
25os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toaster.toastermain.settings")
26
27import toaster.toastermain.settings as toaster_django_settings
28from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
29from toaster.orm.models import Target_Image_File
30from toaster.orm.models import Variable, VariableHistory
31from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
32from toaster.orm.models import Task_Dependency, Package_Dependency
33from toaster.orm.models import Recipe_Dependency
34from bb.msg import BBLogFormatter as format
35
36class NotExisting(Exception):
37 pass
38
39class ORMWrapper(object):
40 """ This class creates the dictionaries needed to store information in the database
41 following the format defined by the Django models. It is also used to save this
42 information in the database.
43 """
44
45 def __init__(self):
46 pass
47
48
49 def create_build_object(self, build_info):
50 assert 'machine' in build_info
51 assert 'distro' in build_info
52 assert 'distro_version' in build_info
53 assert 'started_on' in build_info
54 assert 'cooker_log_path' in build_info
55 assert 'build_name' in build_info
56 assert 'bitbake_version' in build_info
57
58 build = Build.objects.create(
59 machine=build_info['machine'],
60 distro=build_info['distro'],
61 distro_version=build_info['distro_version'],
62 started_on=build_info['started_on'],
63 completed_on=build_info['started_on'],
64 cooker_log_path=build_info['cooker_log_path'],
65 build_name=build_info['build_name'],
66 bitbake_version=build_info['bitbake_version'])
67
68 return build
69
70 def create_target_objects(self, target_info):
71 assert 'build' in target_info
72 assert 'targets' in target_info
73
74 targets = []
75 for tgt_name in target_info['targets']:
76 tgt_object = Target.objects.create( build = target_info['build'],
77 target = tgt_name,
78 is_image = False,
79 );
80 targets.append(tgt_object)
81 return targets
82
83 def update_build_object(self, build, errors, warnings, taskfailures):
84 assert isinstance(build,Build)
85 assert isinstance(errors, int)
86 assert isinstance(warnings, int)
87
88 outcome = Build.SUCCEEDED
89 if errors or taskfailures:
90 outcome = Build.FAILED
91
92 build.completed_on = datetime.datetime.now()
93 build.timespent = int((build.completed_on - build.started_on).total_seconds())
94 build.errors_no = errors
95 build.warnings_no = warnings
96 build.outcome = outcome
97 build.save()
98
99 def update_target_object(self, target, license_manifest_path):
100
101 target.license_manifest_path = license_manifest_path
102 target.save()
103
104 def get_update_task_object(self, task_information, must_exist = False):
105 assert 'build' in task_information
106 assert 'recipe' in task_information
107 assert 'task_name' in task_information
108
109 task_object, created = Task.objects.get_or_create(
110 build=task_information['build'],
111 recipe=task_information['recipe'],
112 task_name=task_information['task_name'],
113 )
114
115 if must_exist and created:
116 task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
117 task_object.delete()
118 raise NotExisting("Task object created when expected to exist", task_information)
119
120 for v in vars(task_object):
121 if v in task_information.keys():
122 vars(task_object)[v] = task_information[v]
123
124 # update setscene-related information
125 if 1 == Task.objects.related_setscene(task_object).count():
126 if task_object.outcome == Task.OUTCOME_COVERED:
127 task_object.outcome = Task.OUTCOME_CACHED
128
129 outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
130 recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
131 if outcome_task_setscene == Task.OUTCOME_SUCCESS:
132 task_object.sstate_result = Task.SSTATE_RESTORED
133 elif outcome_task_setscene == Task.OUTCOME_FAILED:
134 task_object.sstate_result = Task.SSTATE_FAILED
135
136 # record the duration if we have both a start time and an end time
137 if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
138 duration = task_information['end_time'] - task_information['start_time']
139 task_object.elapsed_time = duration
140
141 task_object.save()
142 return task_object
143
144
145 def get_update_recipe_object(self, recipe_information, must_exist = False):
146 assert 'layer_version' in recipe_information
147 assert 'file_path' in recipe_information
148
149
150 recipe_object, created = Recipe.objects.get_or_create(
151 layer_version=recipe_information['layer_version'],
152 file_path=recipe_information['file_path'])
153
154 if must_exist and created:
155 recipe_object.delete()
156 raise NotExisting("Recipe object created when expected to exist", recipe_information)
157
158 for v in vars(recipe_object):
159 if v in recipe_information.keys():
160 vars(recipe_object)[v] = recipe_information[v]
161
162 recipe_object.save()
163
164 return recipe_object
165
166 def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
167 assert isinstance(build_obj, Build)
168 assert isinstance(layer_obj, Layer)
169 assert 'branch' in layer_version_information
170 assert 'commit' in layer_version_information
171 assert 'priority' in layer_version_information
172
173 layer_version_object, created = Layer_Version.objects.get_or_create(
174 build = build_obj,
175 layer = layer_obj,
176 branch = layer_version_information['branch'],
177 commit = layer_version_information['commit'],
178 priority = layer_version_information['priority']
179 )
180
181 return layer_version_object
182
183 def get_update_layer_object(self, layer_information):
184 assert 'name' in layer_information
185 assert 'local_path' in layer_information
186 assert 'layer_index_url' in layer_information
187
188 layer_object, created = Layer.objects.get_or_create(
189 name=layer_information['name'],
190 local_path=layer_information['local_path'],
191 layer_index_url=layer_information['layer_index_url'])
192
193 return layer_object
194
195 def save_target_file_information(self, build_obj, target_obj, filedata):
196 assert isinstance(build_obj, Build)
197 assert isinstance(target_obj, Target)
198 dirs = filedata['dirs']
199 files = filedata['files']
200 syms = filedata['syms']
201
202 # we insert directories, ordered by name depth
203 for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
204 (user, group, size) = d[1:4]
205 permission = d[0][1:]
206 path = d[4].lstrip(".")
207 if len(path) == 0:
208 # we create the root directory as a special case
209 path = "/"
210 tf_obj = Target_File.objects.create(
211 target = target_obj,
212 path = path,
213 size = size,
214 inodetype = Target_File.ITYPE_DIRECTORY,
215 permission = permission,
216 owner = user,
217 group = group,
218 )
219 tf_obj.directory = tf_obj
220 tf_obj.save()
221 continue
222            parent_path = "/".join(path.split("/")[:-1])
223 if len(parent_path) == 0:
224 parent_path = "/"
225 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
226 tf_obj = Target_File.objects.create(
227 target = target_obj,
228 path = path,
229 size = size,
230 inodetype = Target_File.ITYPE_DIRECTORY,
231 permission = permission,
232 owner = user,
233 group = group,
234 directory = parent_obj)
235
236
237 # we insert files
238 for d in files:
239 (user, group, size) = d[1:4]
240 permission = d[0][1:]
241 path = d[4].lstrip(".")
242            parent_path = "/".join(path.split("/")[:-1])
243 inodetype = Target_File.ITYPE_REGULAR
244 if d[0].startswith('b'):
245 inodetype = Target_File.ITYPE_BLOCK
246 if d[0].startswith('c'):
247 inodetype = Target_File.ITYPE_CHARACTER
248 if d[0].startswith('p'):
249 inodetype = Target_File.ITYPE_FIFO
250
251 tf_obj = Target_File.objects.create(
252 target = target_obj,
253 path = path,
254 size = size,
255 inodetype = inodetype,
256 permission = permission,
257 owner = user,
258 group = group)
259 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
260 tf_obj.directory = parent_obj
261 tf_obj.save()
262
263 # we insert symlinks
264 for d in syms:
265 (user, group, size) = d[1:4]
266 permission = d[0][1:]
267 path = d[4].lstrip(".")
268 filetarget_path = d[6]
269
270            parent_path = "/".join(path.split("/")[:-1])
271 if not filetarget_path.startswith("/"):
272 # we have a relative path, get a normalized absolute one
273 filetarget_path = parent_path + "/" + filetarget_path
274 fcp = filetarget_path.split("/")
275 fcpl = []
276 for i in fcp:
277 if i == "..":
278 fcpl.pop()
279 else:
280 fcpl.append(i)
281 filetarget_path = "/".join(fcpl)
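                # e.g. a symlink at /usr/bin/foo pointing to ../lib/libfoo.so becomes
                # /usr/bin/../lib/libfoo.so and then normalizes to /usr/lib/libfoo.so
                # (illustrative paths)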
282
283 try:
284 filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
285 except Exception as e:
286                # we might have an invalid link; there is no way to detect this, so just set it to None
287 filetarget_obj = None
288
289 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
290
291 tf_obj = Target_File.objects.create(
292 target = target_obj,
293 path = path,
294 size = size,
295 inodetype = Target_File.ITYPE_SYMLINK,
296 permission = permission,
297 owner = user,
298 group = group,
299 directory = parent_obj,
300 sym_target = filetarget_obj)
301
302
303 def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
304 assert isinstance(build_obj, Build)
305 assert isinstance(target_obj, Target)
306
307 errormsg = ""
308 for p in packagedict:
309 searchname = p
310 if 'OPKGN' in pkgpnmap[p].keys():
311 searchname = pkgpnmap[p]['OPKGN']
312
313 packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
314 if created:
315                # the package was not built in the current build, so
316                # fill in everything we can from the runtime-reverse package data
317 try:
318 packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
319 packagedict[p]['object'].version = pkgpnmap[p]['PV']
320 packagedict[p]['object'].installed_name = p
321 packagedict[p]['object'].revision = pkgpnmap[p]['PR']
322 packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
323 packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
324 packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
325 packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
326 packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
327
328                    # no files have been recorded for this package yet, so save the files info now
329 for targetpath in pkgpnmap[p]['FILES_INFO']:
330 targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
331 Package_File.objects.create( package = packagedict[p]['object'],
332 path = targetpath,
333 size = targetfilesize)
334 except KeyError as e:
335 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
336
337 # save disk installed size
338 packagedict[p]['object'].installed_size = packagedict[p]['size']
339 packagedict[p]['object'].save()
340
341 Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
342
343 for p in packagedict:
344 for (px,deptype) in packagedict[p]['depends']:
345 if deptype == 'depends':
346 tdeptype = Package_Dependency.TYPE_TRDEPENDS
347 elif deptype == 'recommends':
348 tdeptype = Package_Dependency.TYPE_TRECOMMENDS
349
350 Package_Dependency.objects.create( package = packagedict[p]['object'],
351 depends_on = packagedict[px]['object'],
352 dep_type = tdeptype,
353 target = target_obj);
354
355 if (len(errormsg) > 0):
356 raise Exception(errormsg)
357
358 def save_target_image_file_information(self, target_obj, file_name, file_size):
359 target_image_file = Target_Image_File.objects.create( target = target_obj,
360 file_name = file_name,
361 file_size = file_size)
362 target_image_file.save()
363
364 def create_logmessage(self, log_information):
365 assert 'build' in log_information
366 assert 'level' in log_information
367 assert 'message' in log_information
368
369 log_object = LogMessage.objects.create(
370 build = log_information['build'],
371 level = log_information['level'],
372 message = log_information['message'])
373
374 for v in vars(log_object):
375 if v in log_information.keys():
376 vars(log_object)[v] = log_information[v]
377
378 return log_object.save()
379
380
381 def save_build_package_information(self, build_obj, package_info, recipes):
382 assert isinstance(build_obj, Build)
383
384 # create and save the object
385 pname = package_info['PKG']
386 if 'OPKGN' in package_info.keys():
387 pname = package_info['OPKGN']
388
389 bp_object, created = Package.objects.get_or_create( build = build_obj,
390 name = pname )
391
392 bp_object.installed_name = package_info['PKG']
393 bp_object.recipe = recipes[package_info['PN']]
394 bp_object.version = package_info['PKGV']
395 bp_object.revision = package_info['PKGR']
396 bp_object.summary = package_info['SUMMARY']
397 bp_object.description = package_info['DESCRIPTION']
398 bp_object.size = int(package_info['PKGSIZE'])
399 bp_object.section = package_info['SECTION']
400 bp_object.license = package_info['LICENSE']
401 bp_object.save()
402
403 # save any attached file information
404 for path in package_info['FILES_INFO']:
405 fo = Package_File.objects.create( package = bp_object,
406 path = path,
407 size = package_info['FILES_INFO'][path] )
408
409 def _po_byname(p):
410 pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
411 if created:
412 pkg.size = -1
413 pkg.save()
414 return pkg
415
416 # save soft dependency information
417 if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
418 for p in bb.utils.explode_deps(package_info['RDEPENDS']):
419 Package_Dependency.objects.get_or_create( package = bp_object,
420 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
421 if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
422 for p in bb.utils.explode_deps(package_info['RPROVIDES']):
423 Package_Dependency.objects.get_or_create( package = bp_object,
424 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
425 if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
426 for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
427 Package_Dependency.objects.get_or_create( package = bp_object,
428 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
429 if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
430 for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
431 Package_Dependency.objects.get_or_create( package = bp_object,
432 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
433 if 'RREPLACES' in package_info and package_info['RREPLACES']:
434 for p in bb.utils.explode_deps(package_info['RREPLACES']):
435 Package_Dependency.objects.get_or_create( package = bp_object,
436 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
437 if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
438 for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
439 Package_Dependency.objects.get_or_create( package = bp_object,
440 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
441
442 return bp_object
443
444 def save_build_variables(self, build_obj, vardump):
445 assert isinstance(build_obj, Build)
446
447 for k in vardump:
448            desc = vardump[k]['doc']
449            if desc is None:
450                var_words = k.split('_')
451 root_var = "_".join([word for word in var_words if word.isupper()])
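                # e.g. for k = "IMAGE_FSTYPES_append" the uppercase words give
                # root_var = "IMAGE_FSTYPES", whose documentation is reused (illustrative key)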
452 if root_var and root_var != k and root_var in vardump:
453 desc = vardump[root_var]['doc']
454 if desc is None:
455 desc = ''
456 if desc:
457 helptext_obj = HelpText.objects.create(build=build_obj,
458 area=HelpText.VARIABLE,
459 key=k,
460 text=desc)
461 if not bool(vardump[k]['func']):
462                value = vardump[k]['v']
463 if value is None:
464 value = ''
465 variable_obj = Variable.objects.create( build = build_obj,
466 variable_name = k,
467 variable_value = value,
468 description = desc)
469 for vh in vardump[k]['history']:
470 if not 'documentation.conf' in vh['file']:
471 VariableHistory.objects.create( variable = variable_obj,
472 file_name = vh['file'],
473 line_number = vh['line'],
474 operation = vh['op'])
475
476class BuildInfoHelper(object):
477    """ This class gathers the build information from the server and sends it
478        to the ORM wrapper for storing in the database.
479        It is instantiated once per build and keeps in memory all the data that
480        needs matching before it is written to the database.
481 """
482
483 def __init__(self, server, has_build_history = False):
484 self._configure_django()
485 self.internal_state = {}
486 self.internal_state['taskdata'] = {}
487 self.task_order = 0
488 self.server = server
489 self.orm_wrapper = ORMWrapper()
490 self.has_build_history = has_build_history
491 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
492
493 def _configure_django(self):
494 # Add toaster to sys path for importing modules
495 sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
496
497 ###################
498 ## methods to convert event/external info into objects that the ORM layer uses
499
500
501 def _get_build_information(self):
502 build_info = {}
503 # Generate an identifier for each new build
504
505 build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
506 build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
507 build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
508 build_info['started_on'] = datetime.datetime.now()
509 build_info['completed_on'] = datetime.datetime.now()
510 build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
511 build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
512 build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
513
514 return build_info
515
516 def _get_task_information(self, event, recipe):
517 assert 'taskname' in vars(event)
518
519 task_information = {}
520 task_information['build'] = self.internal_state['build']
521 task_information['outcome'] = Task.OUTCOME_NA
522 task_information['recipe'] = recipe
523 task_information['task_name'] = event.taskname
524 try:
525            # some tasks don't come with a hash, and that's OK
526 task_information['sstate_checksum'] = event.taskhash
527 except AttributeError:
528 pass
529 return task_information
530
531 def _get_layer_version_for_path(self, path):
532 assert path.startswith("/")
533 assert 'build' in self.internal_state
534
535 def _slkey(layer_version):
536 assert isinstance(layer_version, Layer_Version)
537 return len(layer_version.layer.local_path)
538
539        # Heuristic: we always match the recipe to the deepest layer path that
540        # is a prefix of the recipe file path
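        # e.g. with layers at /srv/poky/meta and /srv/poky/meta-extra, a recipe file under
        # /srv/poky/meta-extra/recipes-foo/ matches meta-extra rather than meta (illustrative paths)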
541 for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
542 if (path.startswith(bl.layer.local_path)):
543 return bl
544
545        # TODO: if we get here, we didn't read the layers correctly
546 assert False
547 return None
548
549 def _get_recipe_information_from_taskfile(self, taskfile):
550 localfilepath = taskfile.split(":")[-1]
551 layer_version_obj = self._get_layer_version_for_path(localfilepath)
552
553 recipe_info = {}
554 recipe_info['layer_version'] = layer_version_obj
555 recipe_info['file_path'] = taskfile
556
557 return recipe_info
558
559 def _get_path_information(self, task_object):
560 assert isinstance(task_object, Task)
561 build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/"
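        # e.g. {tmpdir}/buildstats/core-image-minimal-qemux86/201301011200/busybox-1.20.2/
        # (illustrative target, machine, buildname and package values)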
562 build_stats_path = []
563
564 for t in self.internal_state['targets']:
565 target = t.target
566 machine = self.internal_state['build'].machine
567 buildname = self.internal_state['build'].build_name
568 pe, pv = task_object.recipe.version.split(":",1)
569 if len(pe) > 0:
570 package = task_object.recipe.name + "-" + pe + "_" + pv
571 else:
572 package = task_object.recipe.name + "-" + pv
573
574 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target,
575 machine=machine, buildname=buildname,
576 package=package))
577
578 return build_stats_path
579
580 def _remove_redundant(self, string):
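        # e.g. _remove_redundant("b a b c a") returns "a b c"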
581 ret = []
582 for i in string.split():
583 if i not in ret:
584 ret.append(i)
585 return " ".join(sorted(ret))
586
587
588 ################################
589 ## external available methods to store information
590
591 def store_layer_info(self, event):
592 assert 'data' in vars(event)
593 layerinfos = event.data
594 self.internal_state['lvs'] = {}
595 for layer in layerinfos:
596 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']
597
598
599 def store_started_build(self, event):
600 assert '_pkgs' in vars(event)
601 build_information = self._get_build_information()
602
603 build_obj = self.orm_wrapper.create_build_object(build_information)
604 self.internal_state['build'] = build_obj
605
606 # save layer version information for this build
607 for layer_obj in self.internal_state['lvs']:
608 self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
609
610 del self.internal_state['lvs']
611
612 # create target information
613 target_information = {}
614 target_information['targets'] = event._pkgs
615 target_information['build'] = build_obj
616
617 self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
618
619 # Save build configuration
620 self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])
621
622 def update_target_image_file(self, event):
623 image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
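        # e.g. an output file named core-image-minimal-qemux86.rootfs.tar.bz2 is recorded
        # when "tar.bz2" appears in IMAGE_FSTYPES (illustrative file name)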
624 for t in self.internal_state['targets']:
625 if t.is_image == True:
626 output_files = list(event.data.viewkeys())
627 for output in output_files:
628 if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
629 self.orm_wrapper.save_target_image_file_information(t, output, event.data[output])
630
631 def update_build_information(self, event, errors, warnings, taskfailures):
632 if 'build' in self.internal_state:
633 self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
634
635
636 def store_license_manifest_path(self, event):
637 deploy_dir = event.data['deploy_dir']
638 image_name = event.data['image_name']
639 path = deploy_dir + "/licenses/" + image_name + "/"
640 for target in self.internal_state['targets']:
641 if target.target in image_name:
642 self.orm_wrapper.update_target_object(target, path)
643
644
645 def store_started_task(self, event):
646 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
647 assert 'taskfile' in vars(event)
648 localfilepath = event.taskfile.split(":")[-1]
649 assert localfilepath.startswith("/")
650
651 identifier = event.taskfile + ":" + event.taskname
652
653 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
654 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
655
656 task_information = self._get_task_information(event, recipe)
657 task_information['outcome'] = Task.OUTCOME_NA
658
659 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
660 assert 'reason' in vars(event)
661 task_information['task_executed'] = False
662 if event.reason == "covered":
663 task_information['outcome'] = Task.OUTCOME_COVERED
664 if event.reason == "existing":
665 task_information['outcome'] = Task.OUTCOME_PREBUILT
666 else:
667 task_information['task_executed'] = True
668 if 'noexec' in vars(event) and event.noexec == True:
669 task_information['task_executed'] = False
670 task_information['outcome'] = Task.OUTCOME_EMPTY
671 task_information['script_type'] = Task.CODING_NA
672
673        # do not assign order numbers to setscene tasks
674 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
675 self.task_order += 1
676 task_information['order'] = self.task_order
677
678 task_obj = self.orm_wrapper.get_update_task_object(task_information)
679
680 self.internal_state['taskdata'][identifier] = {
681 'outcome': task_information['outcome'],
682 }
683
684
685 def store_tasks_stats(self, event):
686 for (taskfile, taskname, taskstats, recipename) in event.data:
687 localfilepath = taskfile.split(":")[-1]
688 assert localfilepath.startswith("/")
689
690 recipe_information = self._get_recipe_information_from_taskfile(taskfile)
691 recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
692 file_path__endswith = recipe_information['file_path'],
693 name = recipename)
694
695 task_information = {}
696 task_information['build'] = self.internal_state['build']
697 task_information['recipe'] = recipe_object
698 task_information['task_name'] = taskname
699 task_information['cpu_usage'] = taskstats['cpu_usage']
700 task_information['disk_io'] = taskstats['disk_io']
701 task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist
702
703 def update_and_store_task(self, event):
704 assert 'taskfile' in vars(event)
705 localfilepath = event.taskfile.split(":")[-1]
706 assert localfilepath.startswith("/")
707
708 identifier = event.taskfile + ":" + event.taskname
709 if not identifier in self.internal_state['taskdata']:
710 if isinstance(event, bb.build.TaskBase):
711 # we do a bit of guessing
712 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
713 if len(candidates) == 1:
714 identifier = candidates[0]
715
716 assert identifier in self.internal_state['taskdata']
717 identifierlist = identifier.split(":")
718        realtaskfile = ":".join(identifierlist[:-1])
719 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
720 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
721 task_information = self._get_task_information(event,recipe)
722
723 if 'time' in vars(event):
724 if not 'start_time' in self.internal_state['taskdata'][identifier]:
725 self.internal_state['taskdata'][identifier]['start_time'] = event.time
726 else:
727 task_information['end_time'] = event.time
728 task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']
729
730 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
731
732 if 'logfile' in vars(event):
733 task_information['logfile'] = event.logfile
734
735 if '_message' in vars(event):
736 task_information['message'] = event._message
737
738 if 'taskflags' in vars(event):
739 # with TaskStarted, we get even more information
740 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
741 task_information['script_type'] = Task.CODING_PYTHON
742 else:
743 task_information['script_type'] = Task.CODING_SHELL
744
745 if task_information['outcome'] == Task.OUTCOME_NA:
746 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
747 task_information['outcome'] = Task.OUTCOME_SUCCESS
748 del self.internal_state['taskdata'][identifier]
749
750 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
751 task_information['outcome'] = Task.OUTCOME_FAILED
752 del self.internal_state['taskdata'][identifier]
753
754 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
755
756
757 def store_missed_state_tasks(self, event):
758 for (fn, taskname, taskhash, sstatefile) in event.data['missed']:
759
760 identifier = fn + taskname + "_setscene"
761 recipe_information = self._get_recipe_information_from_taskfile(fn)
762 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
763 class MockEvent: pass
764 mevent = MockEvent()
765 mevent.taskname = taskname
766 mevent.taskhash = taskhash
767 task_information = self._get_task_information(mevent,recipe)
768
769 task_information['start_time'] = datetime.datetime.now()
770 task_information['outcome'] = Task.OUTCOME_NA
771 task_information['sstate_checksum'] = taskhash
772 task_information['sstate_result'] = Task.SSTATE_MISS
773 task_information['path_to_sstate_obj'] = sstatefile
774
775 self.orm_wrapper.get_update_task_object(task_information)
776
777 for (fn, taskname, taskhash, sstatefile) in event.data['found']:
778
779 identifier = fn + taskname + "_setscene"
780 recipe_information = self._get_recipe_information_from_taskfile(fn)
781 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
782 class MockEvent: pass
783 mevent = MockEvent()
784 mevent.taskname = taskname
785 mevent.taskhash = taskhash
786 task_information = self._get_task_information(mevent,recipe)
787
788 task_information['path_to_sstate_obj'] = sstatefile
789
790 self.orm_wrapper.get_update_task_object(task_information)
791
792
793 def store_target_package_data(self, event):
794 assert 'data' in vars(event)
795 # for all image targets
796 for target in self.internal_state['targets']:
797 if target.is_image:
798 try:
799 pkgdata = event.data['pkgdata']
800 imgdata = event.data['imgdata'][target.target]
801 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
802 filedata = event.data['filedata'][target.target]
803 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
804 except KeyError:
805                    # we must not have got the data for this image; nothing to save
806 pass
807
808
809
810 def store_dependency_information(self, event):
811 assert '_depgraph' in vars(event)
812 assert 'layer-priorities' in event._depgraph
813 assert 'pn' in event._depgraph
814 assert 'tdepends' in event._depgraph
815
816 errormsg = ""
817
818 # save layer version priorities
819 if 'layer-priorities' in event._depgraph.keys():
820 for lv in event._depgraph['layer-priorities']:
821 (name, path, regexp, priority) = lv
822 layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
823 assert layer_version_obj is not None
824 layer_version_obj.priority = priority
825 layer_version_obj.save()
826
827 # save recipe information
828 self.internal_state['recipes'] = {}
829 for pn in event._depgraph['pn']:
830
831 file_name = event._depgraph['pn'][pn]['filename']
832 layer_version_obj = self._get_layer_version_for_path(file_name.split(":")[-1])
833
834 assert layer_version_obj is not None
835
836 recipe_info = {}
837 recipe_info['name'] = pn
838 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
839 recipe_info['layer_version'] = layer_version_obj
840 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
841 recipe_info['license'] = event._depgraph['pn'][pn]['license']
842 recipe_info['description'] = event._depgraph['pn'][pn]['description']
843 recipe_info['section'] = event._depgraph['pn'][pn]['section']
844 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
845 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
846 recipe_info['file_path'] = file_name
847 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
848 recipe.is_image = False
849 if 'inherits' in event._depgraph['pn'][pn].keys():
850 for cls in event._depgraph['pn'][pn]['inherits']:
851 if cls.endswith('/image.bbclass'):
852 recipe.is_image = True
853 break
854 if recipe.is_image:
855 for t in self.internal_state['targets']:
856 if pn == t.target:
857 t.is_image = True
858 t.save()
859 self.internal_state['recipes'][pn] = recipe
860
861        # we will not get recipes for keys whose values are listed in ASSUME_PROVIDED
862
863 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
864
865 # save recipe dependency
866 # buildtime
867 for recipe in event._depgraph['depends']:
868 try:
869 target = self.internal_state['recipes'][recipe]
870 for dep in event._depgraph['depends'][recipe]:
871 dependency = self.internal_state['recipes'][dep]
872 Recipe_Dependency.objects.get_or_create( recipe = target,
873 depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
874 except KeyError as e:
875                if e.args[0] not in assume_provided and not e.args[0].startswith("virtual/"):
876 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
877
878 # save all task information
879 def _save_a_task(taskdesc):
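            # e.g. taskdesc "busybox.do_compile" gives pn = "busybox" and taskname = "do_compile"
            # (illustrative recipe name)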
880            spec = re.split(r'\.', taskdesc)
881 pn = ".".join(spec[0:-1])
882 taskname = spec[-1]
883 e = event
884 e.taskname = pn
885 recipe = self.internal_state['recipes'][pn]
886 task_info = self._get_task_information(e, recipe)
887 task_info['task_name'] = taskname
888 task_obj = self.orm_wrapper.get_update_task_object(task_info)
889 return task_obj
890
891 # create tasks
892 tasks = {}
893 for taskdesc in event._depgraph['tdepends']:
894 tasks[taskdesc] = _save_a_task(taskdesc)
895
896 # create dependencies between tasks
897 for taskdesc in event._depgraph['tdepends']:
898 target = tasks[taskdesc]
899 for taskdep in event._depgraph['tdepends'][taskdesc]:
900 if taskdep not in tasks:
901                    # fetch task info that was not collected previously
902 dep = _save_a_task(taskdep)
903 else:
904 dep = tasks[taskdep]
905 Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
906
907 if (len(errormsg) > 0):
908 raise Exception(errormsg)
909
910
911 def store_build_package_information(self, event):
912 assert 'data' in vars(event)
913 package_info = event.data
914 self.orm_wrapper.save_build_package_information(self.internal_state['build'],
915 package_info,
916 self.internal_state['recipes'],
917 )
918
919 def _store_log_information(self, level, text):
920 log_information = {}
921 log_information['build'] = self.internal_state['build']
922 log_information['level'] = level
923 log_information['message'] = text
924 self.orm_wrapper.create_logmessage(log_information)
925
926 def store_log_info(self, text):
927 self._store_log_information(LogMessage.INFO, text)
928
929 def store_log_warn(self, text):
930 self._store_log_information(LogMessage.WARNING, text)
931
932 def store_log_error(self, text):
933 self._store_log_information(LogMessage.ERROR, text)
934
935 def store_log_event(self, event):
936 if 'build' in self.internal_state and 'backlog' in self.internal_state:
937 if len(self.internal_state['backlog']):
938 tempevent = self.internal_state['backlog'].pop()
939 print "Saving stored event ", tempevent
940 self.store_log_event(tempevent)
941 else:
942 del self.internal_state['backlog']
943
944 if event.levelno < format.WARNING:
945 return
946
947 if not 'build' in self.internal_state:
948 print "Save event for later"
949 if not 'backlog' in self.internal_state:
950 self.internal_state['backlog'] = []
951 self.internal_state['backlog'].append(event)
952
953 return
954 log_information = {}
955 log_information['build'] = self.internal_state['build']
956 if event.levelno >= format.ERROR:
957 log_information['level'] = LogMessage.ERROR
958 elif event.levelno == format.WARNING:
959 log_information['level'] = LogMessage.WARNING
960 log_information['message'] = event.msg
961 log_information['pathname'] = event.pathname
962 log_information['lineno'] = event.lineno
963 self.orm_wrapper.create_logmessage(log_information)
964
diff --git a/bitbake/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
new file mode 100644
index 0000000000..b7cbe1a4f3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/__init__.py
@@ -0,0 +1,17 @@
1#
2# Gtk+ UI pieces for BitBake
3#
4# Copyright (C) 2006-2007 Richard Purdie
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/builddetailspage.py b/bitbake/lib/bb/ui/crumbs/builddetailspage.py
new file mode 100755
index 0000000000..7fc690e2fa
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/builddetailspage.py
@@ -0,0 +1,437 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import pango
25import gobject
26import bb.process
27from bb.ui.crumbs.progressbar import HobProgressBar
28from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton, HobInfoButton
29from bb.ui.crumbs.runningbuild import RunningBuildTreeView
30from bb.ui.crumbs.runningbuild import BuildFailureTreeView
31from bb.ui.crumbs.hobpages import HobPage
32from bb.ui.crumbs.hobcolor import HobColors
33
34class BuildConfigurationTreeView(gtk.TreeView):
35 def __init__ (self):
36 gtk.TreeView.__init__(self)
37 self.set_rules_hint(False)
38 self.set_headers_visible(False)
39 self.set_property("hover-expand", True)
40 self.get_selection().set_mode(gtk.SELECTION_SINGLE)
41
42        # The name of the configuration variable.
43 renderer0 = gtk.CellRendererText()
44 renderer0.set_property('font-desc', pango.FontDescription('courier bold 12'))
45 col0 = gtk.TreeViewColumn ("Name", renderer0, text=0)
46 self.append_column (col0)
47
48        # The value(s) of the configuration variable.
49 renderer1 = HobWarpCellRendererText(col_number=1)
50 col1 = gtk.TreeViewColumn ("Values", renderer1, text=1)
51 self.append_column (col1)
52
53 def set_vars(self, key="", var=[""]):
54 d = {}
55 if type(var) == str:
56 d = {key: [var]}
57 elif type(var) == list and len(var) > 1:
58 #create the sub item line
59 l = []
60 text = ""
61 for item in var:
62 text = " - " + item
63 l.append(text)
64 d = {key: var}
65
66 return d
67
68 def set_config_model(self, show_vars):
69 listmodel = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
70 parent = None
71 for var in show_vars:
72 for subitem in var.items():
73 name = subitem[0]
74 is_parent = True
75 for value in subitem[1]:
76 if is_parent:
77 parent = listmodel.append(parent, (name, value))
78 is_parent = False
79 else:
80 listmodel.append(parent, (None, value))
81 name = " - "
82 parent = None
83        # renew the tree model after getting the configuration messages
84 self.set_model(listmodel)
85
86 def show(self, src_config_info, src_params):
87 vars = []
88 vars.append(self.set_vars("BB version:", src_params.bb_version))
89 vars.append(self.set_vars("Target arch:", src_params.target_arch))
90 vars.append(self.set_vars("Target OS:", src_params.target_os))
91 vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
92 vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
93 vars.append(self.set_vars("Distro version:", src_params.distro_version))
94 vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
95 vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
96 vars.append(self.set_vars("Layers:", src_config_info.layers))
97
98 for path in src_config_info.layers:
99 import os, os.path
100 if os.path.exists(path):
101 branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
102 if branch.startswith("fatal:"):
103 branch = "(unknown)"
104 if branch:
105 branch = branch.strip('\n')
106 vars.append(self.set_vars("Branch:", branch))
107 break
108
109 self.set_config_model(vars)
110
111 def reset(self):
112 self.set_model(None)
113
114#
115# BuildDetailsPage
116#
117
118class BuildDetailsPage (HobPage):
119
120 def __init__(self, builder):
121 super(BuildDetailsPage, self).__init__(builder, "Building ...")
122
123 self.num_of_issues = 0
124 self.endpath = (0,)
125 # create visual elements
126 self.create_visual_elements()
127
128 def create_visual_elements(self):
129 # create visual elements
130 self.vbox = gtk.VBox(False, 12)
131
132 self.progress_box = gtk.VBox(False, 12)
133 self.task_status = gtk.Label("\n") # to ensure layout is correct
134 self.task_status.set_alignment(0.0, 0.5)
135 self.progress_box.pack_start(self.task_status, expand=False, fill=False)
136 self.progress_hbox = gtk.HBox(False, 6)
137 self.progress_box.pack_end(self.progress_hbox, expand=True, fill=True)
138 self.progress_bar = HobProgressBar()
139 self.progress_hbox.pack_start(self.progress_bar, expand=True, fill=True)
140 self.stop_button = HobAltButton("Stop")
141 self.stop_button.connect("clicked", self.stop_button_clicked_cb)
142 self.stop_button.set_sensitive(False)
143 self.progress_hbox.pack_end(self.stop_button, expand=False, fill=False)
144
145 self.notebook = HobNotebook()
146 self.config_tv = BuildConfigurationTreeView()
147 self.scrolled_view_config = gtk.ScrolledWindow ()
148 self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
149 self.scrolled_view_config.add(self.config_tv)
150 self.notebook.append_page(self.scrolled_view_config, "Build configuration")
151
152 self.failure_tv = BuildFailureTreeView()
153 self.failure_model = self.builder.handler.build.model.failure_model()
154 self.failure_tv.set_model(self.failure_model)
155 self.scrolled_view_failure = gtk.ScrolledWindow ()
156 self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
157 self.scrolled_view_failure.add(self.failure_tv)
158 self.notebook.append_page(self.scrolled_view_failure, "Issues")
159
160 self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
161 self.build_tv.set_model(self.builder.handler.build.model)
162 self.scrolled_view_build = gtk.ScrolledWindow ()
163 self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
164 self.scrolled_view_build.add(self.build_tv)
165 self.notebook.append_page(self.scrolled_view_build, "Log")
166
167 self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)
168
169 self.button_box = gtk.HBox(False, 6)
170 self.back_button = HobAltButton('&lt;&lt; Back')
171 self.back_button.connect("clicked", self.back_button_clicked_cb)
172 self.button_box.pack_start(self.back_button, expand=False, fill=False)
173
174 def update_build_status(self, current, total, task):
175 recipe_path, recipe_task = task.split(", ")
176 recipe = os.path.basename(recipe_path).rstrip(".bb")
177 tsk_msg = "<b>Running task %s of %s:</b> %s\n<b>Recipe:</b> %s" % (current, total, recipe_task, recipe)
178 self.task_status.set_markup(tsk_msg)
179 self.stop_button.set_sensitive(True)
180
181 def reset_build_status(self):
182 self.task_status.set_markup("\n") # to ensure layout is correct
183 self.endpath = (0,)
184
185 def show_issues(self):
186 self.num_of_issues += 1
187 self.notebook.show_indicator_icon("Issues", self.num_of_issues)
188 self.notebook.queue_draw()
189
190 def reset_issues(self):
191 self.num_of_issues = 0
192 self.notebook.hide_indicator_icon("Issues")
193
194 def _remove_all_widget(self):
195 children = self.vbox.get_children() or []
196 for child in children:
197 self.vbox.remove(child)
198 children = self.box_group_area.get_children() or []
199 for child in children:
200 self.box_group_area.remove(child)
201 children = self.get_children() or []
202 for child in children:
203 self.remove(child)
204
205 def add_build_fail_top_bar(self, actions, log_file=None):
206 primary_action = "Edit %s" % actions
207
208 color = HobColors.ERROR
209 build_fail_top = gtk.EventBox()
210 #build_fail_top.set_size_request(-1, 200)
211 build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
212
213 build_fail_tab = gtk.Table(14, 46, True)
214 build_fail_top.add(build_fail_tab)
215
216 icon = gtk.Image()
217 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
218 icon.set_from_pixbuf(icon_pix_buffer)
219 build_fail_tab.attach(icon, 1, 4, 0, 6)
220
221 label = gtk.Label()
222 label.set_alignment(0.0, 0.5)
223 label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
224 build_fail_tab.attach(label, 4, 26, 0, 6)
225
226 label = gtk.Label()
227 label.set_alignment(0.0, 0.5)
228 # Ensure variable disk_full is defined
229 if not hasattr(self.builder, 'disk_full'):
230 self.builder.disk_full = False
231
232 if self.builder.disk_full:
233 markup = "<span size='medium'>There is no disk space left, so Hob cannot finish building your image. Free up some disk space\n"
234 markup += "and restart the build. Check the \"Issues\" tab for more details</span>"
235 label.set_markup(markup)
236 else:
237 label.set_markup("<span size='medium'>Check the \"Issues\" information for more details</span>")
238 build_fail_tab.attach(label, 4, 40, 4, 9)
239
240        # create the primary action button, e.g. 'Edit packages'
241 action_button = HobButton(primary_action)
242 #action_button.set_size_request(-1, 40)
243 action_button.set_tooltip_text("Edit the %s parameters" % actions)
244 action_button.connect('clicked', self.failure_primary_action_button_clicked_cb, primary_action)
245
246 if log_file:
247 open_log_button = HobAltButton("Open log")
248 open_log_button.set_relief(gtk.RELIEF_HALF)
249 open_log_button.set_tooltip_text("Open the build's log file")
250 open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
251
252 attach_pos = (24 if log_file else 14)
253 file_bug_button = HobAltButton('File a bug')
254 file_bug_button.set_relief(gtk.RELIEF_HALF)
255 file_bug_button.set_tooltip_text("Open the Yocto Project bug tracking website")
256 file_bug_button.connect('clicked', self.failure_activate_file_bug_link_cb)
257
258 if not self.builder.disk_full:
259 build_fail_tab.attach(action_button, 4, 13, 9, 12)
260 if log_file:
261 build_fail_tab.attach(open_log_button, 14, 23, 9, 12)
262 build_fail_tab.attach(file_bug_button, attach_pos, attach_pos + 9, 9, 12)
263
264 else:
265 restart_build = HobButton("Restart the build")
266 restart_build.set_tooltip_text("Restart the build")
267 restart_build.connect('clicked', self.restart_build_button_clicked_cb)
268
269 build_fail_tab.attach(restart_build, 4, 13, 9, 12)
270 build_fail_tab.attach(action_button, 14, 23, 9, 12)
271 if log_file:
272 build_fail_tab.attach(open_log_button, attach_pos, attach_pos + 9, 9, 12)
273
274 self.builder.disk_full = False
275 return build_fail_top
276
277 def show_fail_page(self, title):
278 self._remove_all_widget()
279 self.title = "Hob cannot build your %s" % title
280
281 self.build_fail_bar = self.add_build_fail_top_bar(title, self.builder.current_logfile)
282
283 self.pack_start(self.group_align, expand=True, fill=True)
284 self.box_group_area.pack_start(self.build_fail_bar, expand=False, fill=False)
285 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
286
287 self.vbox.pack_start(self.notebook, expand=True, fill=True)
288 self.show_all()
289 self.notebook.set_page("Issues")
290 self.back_button.hide()
291
292 def add_build_stop_top_bar(self, action, log_file=None):
293 color = HobColors.LIGHT_GRAY
294 build_stop_top = gtk.EventBox()
295 #build_stop_top.set_size_request(-1, 200)
296 build_stop_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
297 build_stop_top.set_flags(gtk.CAN_DEFAULT)
298 build_stop_top.grab_default()
299
300 build_stop_tab = gtk.Table(11, 46, True)
301 build_stop_top.add(build_stop_tab)
302
303 icon = gtk.Image()
304 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INFO_HOVER_FILE)
305 icon.set_from_pixbuf(icon_pix_buffer)
306 build_stop_tab.attach(icon, 1, 4, 0, 6)
307
308 label = gtk.Label()
309 label.set_alignment(0.0, 0.5)
310 label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
311 build_stop_tab.attach(label, 4, 26, 0, 6)
312
313 action_button = HobButton("Edit %s" % action)
314 action_button.set_size_request(-1, 40)
315 if action == "image":
316 action_button.set_tooltip_text("Edit the image parameters")
317 elif action == "recipes":
318 action_button.set_tooltip_text("Edit the included recipes")
319 elif action == "packages":
320 action_button.set_tooltip_text("Edit the included packages")
321 action_button.connect('clicked', self.stop_primary_action_button_clicked_cb, action)
322 build_stop_tab.attach(action_button, 4, 13, 6, 9)
323
324 if log_file:
325 open_log_button = HobAltButton("Open log")
326 open_log_button.set_relief(gtk.RELIEF_HALF)
327 open_log_button.set_tooltip_text("Open the build's log file")
328 open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
329 build_stop_tab.attach(open_log_button, 14, 23, 6, 9)
330
331 attach_pos = (24 if log_file else 14)
332 build_button = HobAltButton("Build new image")
333 #build_button.set_size_request(-1, 40)
334 build_button.set_tooltip_text("Create a new image from scratch")
335 build_button.connect('clicked', self.new_image_button_clicked_cb)
336 build_stop_tab.attach(build_button, attach_pos, attach_pos + 9, 6, 9)
337
338 return build_stop_top, action_button
339
340 def show_stop_page(self, action):
341 self._remove_all_widget()
342 self.title = "Build stopped"
343 self.build_stop_bar, action_button = self.add_build_stop_top_bar(action, self.builder.current_logfile)
344
345 self.pack_start(self.group_align, expand=True, fill=True)
346 self.box_group_area.pack_start(self.build_stop_bar, expand=False, fill=False)
347 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
348
349 self.vbox.pack_start(self.notebook, expand=True, fill=True)
350 self.show_all()
351 self.back_button.hide()
352 return action_button
353
354 def show_page(self, step):
355 self._remove_all_widget()
356 if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
357 self.title = "Building packages ..."
358 else:
359 self.title = "Building image ..."
360 self.build_details_top = self.add_onto_top_bar(None)
361 self.pack_start(self.build_details_top, expand=False, fill=False)
362 self.pack_start(self.group_align, expand=True, fill=True)
363
364 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
365
366 self.progress_bar.reset()
367 self.config_tv.reset()
368 self.vbox.pack_start(self.progress_box, expand=False, fill=False)
369
370 self.vbox.pack_start(self.notebook, expand=True, fill=True)
371
372 self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
373 self.show_all()
374 self.notebook.set_page("Log")
375 self.back_button.hide()
376
377 self.reset_build_status()
378 self.reset_issues()
379
380 def update_progress_bar(self, title, fraction, status=None):
381 self.progress_bar.update(fraction)
382 self.progress_bar.set_title(title)
383 self.progress_bar.set_rcstyle(status)
384
385 def back_button_clicked_cb(self, button):
386 self.builder.show_configuration()
387
388 def new_image_button_clicked_cb(self, button):
389 self.builder.reset()
390
391 def show_back_button(self):
392 self.back_button.show()
393
394 def stop_button_clicked_cb(self, button):
395 self.builder.stop_build()
396
397 def hide_stop_button(self):
398 self.stop_button.set_sensitive(False)
399 self.stop_button.hide()
400
401 def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
402 if treeview and v_adj:
403            if path[0] > self.endpath[0]: # check whether the event appended a new row
404 self.endpath = path
405                # check whether the gtk.Adjustment position is at the end boundary
406 if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
407 treeview.scroll_to_cell(path)
408
409 def show_configurations(self, configurations, params):
410 self.config_tv.show(configurations, params)
411
412 def failure_primary_action_button_clicked_cb(self, button, action):
413 if "Edit recipes" in action:
414 self.builder.show_recipes()
415 elif "Edit packages" in action:
416 self.builder.show_packages()
417 elif "Edit image" in action:
418 self.builder.show_configuration()
419
420 def restart_build_button_clicked_cb(self, button):
421 self.builder.just_bake()
422
423 def stop_primary_action_button_clicked_cb(self, button, action):
424 if "recipes" in action:
425 self.builder.show_recipes()
426 elif "packages" in action:
427 self.builder.show_packages()
428 elif "image" in action:
429 self.builder.show_configuration()
430
431 def open_log_button_clicked_cb(self, button, log_file):
432 if log_file:
433 log_file = "file:///" + log_file
434 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
435
436 def failure_activate_file_bug_link_cb(self, button):
437 button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
diff --git a/bitbake/lib/bb/ui/crumbs/builder.py b/bitbake/lib/bb/ui/crumbs/builder.py
new file mode 100755
index 0000000000..455af320e8
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/builder.py
@@ -0,0 +1,1475 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2011-2012 Intel Corporation
6#
7# Authored by Joshua Lock <josh@linux.intel.com>
8# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
9# Authored by Shane Wang <shane.wang@intel.com>
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import glib
25import gtk, gobject
26import copy
27import os
28import subprocess
29import shlex
30import re
31import logging
32import sys
33import signal
34import time
35from bb.ui.crumbs.imageconfigurationpage import ImageConfigurationPage
36from bb.ui.crumbs.recipeselectionpage import RecipeSelectionPage
37from bb.ui.crumbs.packageselectionpage import PackageSelectionPage
38from bb.ui.crumbs.builddetailspage import BuildDetailsPage
39from bb.ui.crumbs.imagedetailspage import ImageDetailsPage
40from bb.ui.crumbs.sanitycheckpage import SanityCheckPage
41from bb.ui.crumbs.hobwidget import hwc, HobButton, HobAltButton
42from bb.ui.crumbs.persistenttooltip import PersistentTooltip
43import bb.ui.crumbs.utils
44from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
45from bb.ui.crumbs.hig.simplesettingsdialog import SimpleSettingsDialog
46from bb.ui.crumbs.hig.advancedsettingsdialog import AdvancedSettingsDialog
47from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
48from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
49from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog
50from bb.ui.crumbs.hig.parsingwarningsdialog import ParsingWarningsDialog
51from bb.ui.crumbs.hig.propertydialog import PropertyDialog
52
53hobVer = 20120808
54
55class Configuration:
56    '''Represents the build configuration data.'''
57
58 @classmethod
59 def parse_proxy_string(cls, proxy):
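        # e.g. parse_proxy_string("http://user:pass@proxy.example.com:8080") returns
        # ("http", "user", "pass", "proxy.example.com", "8080") (illustrative proxy)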
60 pattern = "^\s*((http|https|ftp|socks|cvs)://)?((\S+):(\S+)@)?([^\s:]+)(:(\d+))?/?"
61 match = re.search(pattern, proxy)
62 if match:
63 return match.group(2), match.group(4), match.group(5), match.group(6), match.group(8)
64 else:
65 return None, None, None, "", ""
66
67 @classmethod
68 def make_host_string(cls, prot, user, passwd, host, default_prot=""):
69 if host == None or host == "":
70 return ""
71
72 passwd = passwd or ""
73
74 if user != None and user != "":
75 if prot == None or prot == "":
76 prot = default_prot
77 return prot + "://" + user + ":" + passwd + "@" + host
78 else:
79 if prot == None or prot == "":
80 return host
81 else:
82 return prot + "://" + host
83
84 @classmethod
85 def make_port_string(cls, port):
86 port = port or ""
87 return port
88
89 @classmethod
90 def make_proxy_string(cls, prot, user, passwd, host, port, default_prot=""):
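        # e.g. make_proxy_string("http", "user", "pass", "proxy.example.com", "8080") returns
        # "http://user:pass@proxy.example.com:8080" (illustrative values)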
91 if host == None or host == "":# or port == None or port == "":
92 return ""
93
94 return Configuration.make_host_string(prot, user, passwd, host, default_prot) + (":" + Configuration.make_port_string(port) if port else "")
95
96 def __init__(self):
97 self.curr_mach = ""
98 self.selected_image = None
99 # settings
100 self.curr_distro = ""
101 self.dldir = self.sstatedir = self.sstatemirror = ""
102 self.pmake = self.bbthread = 0
103 self.curr_package_format = ""
104 self.image_rootfs_size = self.image_extra_size = 0
105 self.image_overhead_factor = 1
106 self.incompat_license = ""
107 self.curr_sdk_machine = ""
108 self.conf_version = self.lconf_version = ""
109 self.extra_setting = {}
110 self.toolchain_build = False
111 self.image_fstypes = ""
112 self.image_size = None
113 self.image_packages = []
114 # bblayers.conf
115 self.layers = []
116 # image/recipes/packages
117 self.clear_selection()
118
119 self.user_selected_packages = []
120
121 self.default_task = "build"
122
123 # proxy settings
124 self.enable_proxy = None
125 self.same_proxy = False
126 self.proxies = {
127 "http" : [None, None, None, "", ""], # protocol : [prot, user, passwd, host, port]
128 "https" : [None, None, None, "", ""],
129 "ftp" : [None, None, None, "", ""],
130 "socks" : [None, None, None, "", ""],
131 "cvs" : [None, None, None, "", ""],
132 }
133
134 def clear_selection(self):
135 self.selected_recipes = []
136 self.selected_packages = []
137 self.initial_selected_image = None
138 self.initial_selected_packages = []
139 self.initial_user_selected_packages = []
140
141 def split_proxy(self, protocol, proxy):
142 entry = []
143 prot, user, passwd, host, port = Configuration.parse_proxy_string(proxy)
144 entry.append(prot)
145 entry.append(user)
146 entry.append(passwd)
147 entry.append(host)
148 entry.append(port)
149 self.proxies[protocol] = entry
150
151 def combine_proxy(self, protocol):
152 entry = self.proxies[protocol]
153 return Configuration.make_proxy_string(entry[0], entry[1], entry[2], entry[3], entry[4], protocol)
154
155 def combine_host_only(self, protocol):
156 entry = self.proxies[protocol]
157 return Configuration.make_host_string(entry[0], entry[1], entry[2], entry[3], protocol)
158
159 def combine_port_only(self, protocol):
160 entry = self.proxies[protocol]
161 return Configuration.make_port_string(entry[4])
162
163 def update(self, params):
164 # settings
165 self.curr_distro = params["distro"]
166 self.dldir = params["dldir"]
167 self.sstatedir = params["sstatedir"]
168 self.sstatemirror = params["sstatemirror"]
169 self.pmake = int(params["pmake"].split()[1])
170 self.bbthread = params["bbthread"]
171 self.curr_package_format = " ".join(params["pclass"].split("package_")).strip()
172 self.image_rootfs_size = params["image_rootfs_size"]
173 self.image_extra_size = params["image_extra_size"]
174 self.image_overhead_factor = params['image_overhead_factor']
175 self.incompat_license = params["incompat_license"]
176 self.curr_sdk_machine = params["sdk_machine"]
177 self.conf_version = params["conf_version"]
178 self.lconf_version = params["lconf_version"]
179 self.image_fstypes = params["image_fstypes"]
180 # self.extra_setting/self.toolchain_build
181 # bblayers.conf
182 self.layers = params["layer"].split()
183 self.layers_non_removable = params["layers_non_removable"].split()
184 self.default_task = params["default_task"]
185
186 # proxy settings
187 self.enable_proxy = params["http_proxy"] != "" or params["https_proxy"] != "" \
188 or params["ftp_proxy"] != "" or params["socks_proxy"] != "" \
189 or params["cvs_proxy_host"] != "" or params["cvs_proxy_port"] != ""
190 self.split_proxy("http", params["http_proxy"])
191 self.split_proxy("https", params["https_proxy"])
192 self.split_proxy("ftp", params["ftp_proxy"])
193 self.split_proxy("socks", params["socks_proxy"])
194 self.split_proxy("cvs", params["cvs_proxy_host"] + ":" + params["cvs_proxy_port"])
195
196 def save(self, handler, defaults=False):
197 # bblayers.conf
198 handler.set_var_in_file("BBLAYERS", self.layers, "bblayers.conf")
199 # local.conf
200 if not defaults:
201 handler.early_assign_var_in_file("MACHINE", self.curr_mach, "local.conf")
202 handler.set_var_in_file("DISTRO", self.curr_distro, "local.conf")
203 handler.set_var_in_file("DL_DIR", self.dldir, "local.conf")
204 handler.set_var_in_file("SSTATE_DIR", self.sstatedir, "local.conf")
205 sstate_mirror_list = self.sstatemirror.split("\\n ")
206 sstate_mirror_list_modified = []
207 for mirror in sstate_mirror_list:
208 if mirror != "":
209 mirror = mirror + "\\n"
210 sstate_mirror_list_modified.append(mirror)
211 handler.set_var_in_file("SSTATE_MIRRORS", sstate_mirror_list_modified, "local.conf")
212 handler.set_var_in_file("PARALLEL_MAKE", "-j %s" % self.pmake, "local.conf")
213 handler.set_var_in_file("BB_NUMBER_THREADS", self.bbthread, "local.conf")
214 handler.set_var_in_file("PACKAGE_CLASSES", " ".join(["package_" + i for i in self.curr_package_format.split()]), "local.conf")
215 handler.set_var_in_file("IMAGE_ROOTFS_SIZE", self.image_rootfs_size, "local.conf")
216 handler.set_var_in_file("IMAGE_EXTRA_SPACE", self.image_extra_size, "local.conf")
217 handler.set_var_in_file("INCOMPATIBLE_LICENSE", self.incompat_license, "local.conf")
218 handler.set_var_in_file("SDKMACHINE", self.curr_sdk_machine, "local.conf")
219 handler.set_var_in_file("CONF_VERSION", self.conf_version, "local.conf")
220 handler.set_var_in_file("LCONF_VERSION", self.lconf_version, "bblayers.conf")
221 handler.set_extra_config(self.extra_setting)
222 handler.set_var_in_file("TOOLCHAIN_BUILD", self.toolchain_build, "local.conf")
223 handler.set_var_in_file("IMAGE_FSTYPES", self.image_fstypes, "local.conf")
224 if not defaults:
225 # image/recipes/packages
226 handler.set_var_in_file("__SELECTED_IMAGE__", self.selected_image, "local.conf")
227 handler.set_var_in_file("DEPENDS", self.selected_recipes, "local.conf")
228 handler.set_var_in_file("IMAGE_INSTALL", self.user_selected_packages, "local.conf")
229 # proxy
230 if self.enable_proxy == True:
231 handler.set_var_in_file("http_proxy", self.combine_proxy("http"), "local.conf")
232 handler.set_var_in_file("https_proxy", self.combine_proxy("https"), "local.conf")
233 handler.set_var_in_file("ftp_proxy", self.combine_proxy("ftp"), "local.conf")
234 handler.set_var_in_file("all_proxy", self.combine_proxy("socks"), "local.conf")
235 handler.set_var_in_file("CVS_PROXY_HOST", self.combine_host_only("cvs"), "local.conf")
236 handler.set_var_in_file("CVS_PROXY_PORT", self.combine_port_only("cvs"), "local.conf")
237 else:
238 handler.set_var_in_file("http_proxy", "", "local.conf")
239 handler.set_var_in_file("https_proxy", "", "local.conf")
240 handler.set_var_in_file("ftp_proxy", "", "local.conf")
241 handler.set_var_in_file("all_proxy", "", "local.conf")
242 handler.set_var_in_file("CVS_PROXY_HOST", "", "local.conf")
243 handler.set_var_in_file("CVS_PROXY_PORT", "", "local.conf")
244
245 def __str__(self):
246 s = "VERSION: '%s', BBLAYERS: '%s', MACHINE: '%s', DISTRO: '%s', DL_DIR: '%s'," % \
247 (hobVer, " ".join(self.layers), self.curr_mach, self.curr_distro, self.dldir )
248 s += "SSTATE_DIR: '%s', SSTATE_MIRROR: '%s', PARALLEL_MAKE: '-j %s', BB_NUMBER_THREADS: '%s', PACKAGE_CLASSES: '%s', " % \
249 (self.sstatedir, self.sstatemirror, self.pmake, self.bbthread, " ".join(["package_" + i for i in self.curr_package_format.split()]))
250 s += "IMAGE_ROOTFS_SIZE: '%s', IMAGE_EXTRA_SPACE: '%s', INCOMPATIBLE_LICENSE: '%s', SDKMACHINE: '%s', CONF_VERSION: '%s', " % \
251 (self.image_rootfs_size, self.image_extra_size, self.incompat_license, self.curr_sdk_machine, self.conf_version)
252 s += "LCONF_VERSION: '%s', EXTRA_SETTING: '%s', TOOLCHAIN_BUILD: '%s', IMAGE_FSTYPES: '%s', __SELECTED_IMAGE__: '%s', " % \
253 (self.lconf_version, self.extra_setting, self.toolchain_build, self.image_fstypes, self.selected_image)
254 s += "DEPENDS: '%s', IMAGE_INSTALL: '%s', enable_proxy: '%s', use_same_proxy: '%s', http_proxy: '%s', " % \
255 (self.selected_recipes, self.user_selected_packages, self.enable_proxy, self.same_proxy, self.combine_proxy("http"))
256 s += "https_proxy: '%s', ftp_proxy: '%s', all_proxy: '%s', CVS_PROXY_HOST: '%s', CVS_PROXY_PORT: '%s'" % \
257 (self.combine_proxy("https"), self.combine_proxy("ftp"), self.combine_proxy("socks"),
258 self.combine_host_only("cvs"), self.combine_port_only("cvs"))
259 return s
260
261class Parameters:
262 '''Represents other variables like available machines, etc.'''
263
264 def __init__(self):
265 # Variables
266 self.max_threads = 65535
267 self.core_base = ""
268 self.image_addr = ""
269 self.image_types = []
270 self.runnable_image_types = []
271 self.runnable_machine_patterns = []
272 self.deployable_image_types = []
273 self.tmpdir = ""
274
275 self.all_machines = []
276 self.all_package_formats = []
277 self.all_distros = []
278 self.all_sdk_machines = []
279 self.all_layers = []
280 self.image_names = []
281 self.image_white_pattern = ""
282 self.image_black_pattern = ""
283
284 # for build log to show
285 self.bb_version = ""
286 self.target_arch = ""
287 self.target_os = ""
288 self.distro_version = ""
289 self.tune_pkgarch = ""
290
291 def update(self, params):
292 self.max_threads = params["max_threads"]
293 self.core_base = params["core_base"]
294 self.image_addr = params["image_addr"]
295 self.image_types = params["image_types"].split()
296 self.runnable_image_types = params["runnable_image_types"].split()
297 self.runnable_machine_patterns = params["runnable_machine_patterns"].split()
298 self.deployable_image_types = params["deployable_image_types"].split()
299 self.tmpdir = params["tmpdir"]
300 self.image_white_pattern = params["image_white_pattern"]
301 self.image_black_pattern = params["image_black_pattern"]
302 self.kernel_image_type = params["kernel_image_type"]
303 # for build log to show
304 self.bb_version = params["bb_version"]
305 self.target_arch = params["target_arch"]
306 self.target_os = params["target_os"]
307 self.distro_version = params["distro_version"]
308 self.tune_pkgarch = params["tune_pkgarch"]
309
310def hob_conf_filter(fn, data):
311 if fn.endswith("/local.conf"):
312 distro = data.getVar("DISTRO_HOB")
313 if distro:
314 if distro != "defaultsetup":
315 data.setVar("DISTRO", distro)
316 else:
317 data.delVar("DISTRO")
318
319 keys = ["MACHINE_HOB", "SDKMACHINE_HOB", "PACKAGE_CLASSES_HOB", \
320 "BB_NUMBER_THREADS_HOB", "PARALLEL_MAKE_HOB", "DL_DIR_HOB", \
321 "SSTATE_DIR_HOB", "SSTATE_MIRRORS_HOB", "INCOMPATIBLE_LICENSE_HOB"]
322 for key in keys:
323 var_hob = data.getVar(key)
324 if var_hob:
325 data.setVar(key.split("_HOB")[0], var_hob)
326 return
327
328 if fn.endswith("/bblayers.conf"):
329 layers = data.getVar("BBLAYERS_HOB")
330 if layers:
331 data.setVar("BBLAYERS", layers)
332 return
333
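To make the filtering above concrete, here is a minimal sketch of how the *_HOB overrides get promoted when local.conf is parsed. FakeData is a hypothetical stand-in for the BitBake datastore, providing only the getVar/setVar/delVar calls that hob_conf_filter() uses.

    # Minimal sketch only; FakeData and its values are hypothetical.
    class FakeData:
        def __init__(self, values):
            self.values = dict(values)
        def getVar(self, key):
            return self.values.get(key)
        def setVar(self, key, value):
            self.values[key] = value
        def delVar(self, key):
            self.values.pop(key, None)

    d = FakeData({"MACHINE_HOB": "qemux86", "DISTRO_HOB": "defaultsetup", "DISTRO": "poky"})
    hob_conf_filter("/path/to/local.conf", d)
    assert d.getVar("MACHINE") == "qemux86"   # promoted from MACHINE_HOB
    assert d.getVar("DISTRO") is None         # deleted because DISTRO_HOB is "defaultsetup"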
334class Builder(gtk.Window):
335
336 (INITIAL_CHECKS,
337 MACHINE_SELECTION,
338 RCPPKGINFO_POPULATING,
339 RCPPKGINFO_POPULATED,
340 BASEIMG_SELECTED,
341 RECIPE_SELECTION,
342 PACKAGE_GENERATING,
343 PACKAGE_GENERATED,
344 PACKAGE_SELECTION,
345 FAST_IMAGE_GENERATING,
346 IMAGE_GENERATING,
347 IMAGE_GENERATED,
348 MY_IMAGE_OPENED,
349 BACK,
350 END_NOOP) = range(15)
351
352 (SANITY_CHECK,
353 IMAGE_CONFIGURATION,
354 RECIPE_DETAILS,
355 BUILD_DETAILS,
356 PACKAGE_DETAILS,
357 IMAGE_DETAILS,
358 END_TAB) = range(7)
359
360 __step2page__ = {
361 INITIAL_CHECKS : SANITY_CHECK,
362 MACHINE_SELECTION : IMAGE_CONFIGURATION,
363 RCPPKGINFO_POPULATING : IMAGE_CONFIGURATION,
364 RCPPKGINFO_POPULATED : IMAGE_CONFIGURATION,
365 BASEIMG_SELECTED : IMAGE_CONFIGURATION,
366 RECIPE_SELECTION : RECIPE_DETAILS,
367 PACKAGE_GENERATING : BUILD_DETAILS,
368 PACKAGE_GENERATED : PACKAGE_DETAILS,
369 PACKAGE_SELECTION : PACKAGE_DETAILS,
370 FAST_IMAGE_GENERATING : BUILD_DETAILS,
371 IMAGE_GENERATING : BUILD_DETAILS,
372 IMAGE_GENERATED : IMAGE_DETAILS,
373 MY_IMAGE_OPENED : IMAGE_DETAILS,
374 END_NOOP : None,
375 }
376
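As an example of how this table is used: switch_page() (defined further down in this class) calls self.nb.set_current_page(self.__step2page__[next_step]), so FAST_IMAGE_GENERATING raises the BUILD_DETAILS tab, while both PACKAGE_GENERATED and PACKAGE_SELECTION land on PACKAGE_DETAILS.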
377 SANITY_CHECK_MIN_DISPLAY_TIME = 5
378
379 def __init__(self, hobHandler, recipe_model, package_model):
380 super(Builder, self).__init__()
381
382 self.hob_image = "hob-image"
383
384 # handler
385 self.handler = hobHandler
386
387 # logger
388 self.logger = logging.getLogger("BitBake")
389 self.consolelog = None
390 self.current_logfile = None
391
392 # configuration and parameters
393 self.configuration = Configuration()
394 self.parameters = Parameters()
395
396 # build step
397 self.current_step = None
398 self.previous_step = None
399
400 self.stopping = False
401
402 # recipe model and package model
403 self.recipe_model = recipe_model
404 self.package_model = package_model
405
406 # Indicate whether the user has customized the image
407 self.customized = False
408
409 # Indicate whether the UI is working
410 self.sensitive = True
411
412 # Indicate whether the sanity check ran
413 self.sanity_checked = False
414
415 # save parsing warnings
416 self.parsing_warnings = []
417
418 # create visual elements
419 self.create_visual_elements()
420
421 # connect the signals to functions
422 self.connect("delete-event", self.destroy_window_cb)
423 self.recipe_model.connect ("recipe-selection-changed", self.recipelist_changed_cb)
424 self.package_model.connect("package-selection-changed", self.packagelist_changed_cb)
425 self.handler.connect("config-updated", self.handler_config_updated_cb)
426 self.handler.connect("package-formats-updated", self.handler_package_formats_updated_cb)
427 self.handler.connect("parsing-started", self.handler_parsing_started_cb)
428 self.handler.connect("parsing", self.handler_parsing_cb)
429 self.handler.connect("parsing-completed", self.handler_parsing_completed_cb)
430 self.handler.build.connect("build-started", self.handler_build_started_cb)
431 self.handler.build.connect("build-succeeded", self.handler_build_succeeded_cb)
432 self.handler.build.connect("build-failed", self.handler_build_failed_cb)
433 self.handler.build.connect("build-aborted", self.handler_build_aborted_cb)
434 self.handler.build.connect("task-started", self.handler_task_started_cb)
435 self.handler.build.connect("disk-full", self.handler_disk_full_cb)
436 self.handler.build.connect("log-error", self.handler_build_failure_cb)
437 self.handler.build.connect("log-warning", self.handler_build_failure_cb)
438 self.handler.build.connect("log", self.handler_build_log_cb)
439 self.handler.build.connect("no-provider", self.handler_no_provider_cb)
440 self.handler.connect("generating-data", self.handler_generating_data_cb)
441 self.handler.connect("data-generated", self.handler_data_generated_cb)
442 self.handler.connect("command-succeeded", self.handler_command_succeeded_cb)
443 self.handler.connect("command-failed", self.handler_command_failed_cb)
444 self.handler.connect("parsing-warning", self.handler_parsing_warning_cb)
445 self.handler.connect("sanity-failed", self.handler_sanity_failed_cb)
446 self.handler.connect("recipe-populated", self.handler_recipe_populated_cb)
447 self.handler.connect("package-populated", self.handler_package_populated_cb)
448
449 self.handler.append_to_bbfiles("${TOPDIR}/recipes/images/custom/*.bb")
450 self.handler.append_to_bbfiles("${TOPDIR}/recipes/images/*.bb")
451 self.initiate_new_build_async()
452
453 signal.signal(signal.SIGINT, self.event_handle_SIGINT)
454
455 def create_visual_elements(self):
456 self.set_title("Hob")
457 self.set_icon_name("applications-development")
458 self.set_resizable(True)
459
460 try:
461 window_width = self.get_screen().get_width()
462 window_height = self.get_screen().get_height()
463 except AttributeError:
464 print "Please set DISPLAY variable before running Hob."
465 sys.exit(1)
466
467 if window_width >= hwc.MAIN_WIN_WIDTH:
468 window_width = hwc.MAIN_WIN_WIDTH
469 window_height = hwc.MAIN_WIN_HEIGHT
470 self.set_size_request(window_width, window_height)
471
472 self.vbox = gtk.VBox(False, 0)
473 self.vbox.set_border_width(0)
474 self.add(self.vbox)
475
476 # create pages
477 self.image_configuration_page = ImageConfigurationPage(self)
478 self.recipe_details_page = RecipeSelectionPage(self)
479 self.build_details_page = BuildDetailsPage(self)
480 self.package_details_page = PackageSelectionPage(self)
481 self.image_details_page = ImageDetailsPage(self)
482 self.sanity_check_page = SanityCheckPage(self)
483 self.display_sanity_check = False
484 self.sanity_check_post_func = False
485 self.had_network_error = False
486
487 self.nb = gtk.Notebook()
488 self.nb.set_show_tabs(False)
489 self.nb.insert_page(self.sanity_check_page, None, self.SANITY_CHECK)
490 self.nb.insert_page(self.image_configuration_page, None, self.IMAGE_CONFIGURATION)
491 self.nb.insert_page(self.recipe_details_page, None, self.RECIPE_DETAILS)
492 self.nb.insert_page(self.build_details_page, None, self.BUILD_DETAILS)
493 self.nb.insert_page(self.package_details_page, None, self.PACKAGE_DETAILS)
494 self.nb.insert_page(self.image_details_page, None, self.IMAGE_DETAILS)
495 self.vbox.pack_start(self.nb, expand=True, fill=True)
496
497 self.show_all()
498 self.nb.set_current_page(0)
499
500 def sanity_check_timeout(self):
501 # The minimum time for showing the 'sanity check' page has passed
502 # If someone set 'sanity_check_post_func' meanwhile, execute it now
503 self.display_sanity_check = False
504 if self.sanity_check_post_func:
505 temp = self.sanity_check_post_func
506 self.sanity_check_post_func = None
507 temp()
508 return False
509
510 def show_sanity_check_page(self):
511 # This window must stay on screen for at least 5 seconds, according to the design document
512 self.nb.set_current_page(self.SANITY_CHECK)
513 self.sanity_check_post_func = None
514 self.display_sanity_check = True
515 self.sanity_check_page.start()
516 gobject.timeout_add(self.SANITY_CHECK_MIN_DISPLAY_TIME * 1000, self.sanity_check_timeout)
517
518 def execute_after_sanity_check(self, func):
519 if not self.display_sanity_check:
520 func()
521 else:
522 self.sanity_check_post_func = func
523
524 def generate_configuration(self):
525 if not self.sanity_checked:
526 self.show_sanity_check_page()
527 self.handler.generate_configuration()
528
529 def initiate_new_build_async(self):
530 self.configuration.selected_image = None
531 self.switch_page(self.MACHINE_SELECTION)
532 self.handler.init_cooker()
533 self.handler.set_extra_inherit("image_types")
534 self.generate_configuration()
535
536 def update_config_async(self):
537 self.set_user_config()
538 self.generate_configuration()
539 self.switch_page(self.MACHINE_SELECTION)
540
541 def sanity_check(self):
542 self.handler.trigger_sanity_check()
543
544 def populate_recipe_package_info_async(self):
545 self.switch_page(self.RCPPKGINFO_POPULATING)
546 # Parse recipes
547 self.set_user_config()
548 self.handler.generate_recipes()
549
550 def generate_packages_async(self, log = False):
551 self.switch_page(self.PACKAGE_GENERATING)
552 if log:
553 self.current_logfile = self.handler.get_logfile()
554 self.do_log(self.current_logfile)
555 # Build packages
556 _, all_recipes = self.recipe_model.get_selected_recipes()
557 self.set_user_config()
558 self.handler.reset_build()
559 self.handler.generate_packages(all_recipes, self.configuration.default_task)
560
561 def restore_initial_selected_packages(self):
562 self.package_model.set_selected_packages(self.configuration.initial_user_selected_packages, True)
563 self.package_model.set_selected_packages(self.configuration.initial_selected_packages)
564 for package in self.configuration.selected_packages:
565 if package not in self.configuration.initial_selected_packages:
566 self.package_model.exclude_item(self.package_model.find_path_for_item(package))
567
568 def fast_generate_image_async(self, log = False):
569 self.switch_page(self.FAST_IMAGE_GENERATING)
570 if log:
571 self.current_logfile = self.handler.get_logfile()
572 self.do_log(self.current_logfile)
573 # Build packages
574 _, all_recipes = self.recipe_model.get_selected_recipes()
575 self.set_user_config()
576 self.handler.reset_build()
577 self.handler.generate_packages(all_recipes, self.configuration.default_task)
578
579 def generate_image_async(self, cont = False):
580 self.switch_page(self.IMAGE_GENERATING)
581 self.handler.reset_build()
582 if not cont:
583 self.current_logfile = self.handler.get_logfile()
584 self.do_log(self.current_logfile)
585 # Build image
586 self.set_user_config()
587 toolchain_packages = []
588 base_image = None
589 if self.configuration.toolchain_build:
590 toolchain_packages = self.package_model.get_selected_packages_toolchain()
591 if self.configuration.selected_image == self.recipe_model.__custom_image__:
592 packages = self.package_model.get_selected_packages()
593 image = self.hob_image
594 base_image = self.configuration.initial_selected_image
595 else:
596 packages = []
597 image = self.configuration.selected_image
598 self.handler.generate_image(image,
599 base_image,
600 packages,
601 toolchain_packages,
602 self.configuration.default_task)
603
604 def generate_new_image(self, image, description):
605 base_image = self.configuration.initial_selected_image
606 if base_image == self.recipe_model.__custom_image__:
607 base_image = None
608 packages = self.package_model.get_selected_packages()
609 self.handler.generate_new_image(image, base_image, packages, description)
610
611 def ensure_dir(self, directory):
612 self.handler.ensure_dir(directory)
613
614 def get_parameters_sync(self):
615 return self.handler.get_parameters()
616
617 def request_package_info_async(self):
618 self.handler.request_package_info()
619
620 def cancel_build_sync(self, force=False):
621 self.handler.cancel_build(force)
622
623 def cancel_parse_sync(self):
624 self.handler.cancel_parse()
625
626 def switch_page(self, next_step):
627 # Main Workflow (Business Logic)
628 self.nb.set_current_page(self.__step2page__[next_step])
629
630 if next_step == self.MACHINE_SELECTION: # init step
631 self.image_configuration_page.show_machine()
632
633 elif next_step == self.RCPPKGINFO_POPULATING:
634 # MACHINE CHANGED action or SETTINGS CHANGED
635 # show the progress bar
636 self.image_configuration_page.show_info_populating()
637
638 elif next_step == self.RCPPKGINFO_POPULATED:
639 self.image_configuration_page.show_info_populated()
640
641 elif next_step == self.BASEIMG_SELECTED:
642 self.image_configuration_page.show_baseimg_selected()
643
644 elif next_step == self.RECIPE_SELECTION:
645 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
646 self.recipe_details_page.set_recipe_curr_tab(self.recipe_details_page.ALL)
647 else:
648 self.recipe_details_page.set_recipe_curr_tab(self.recipe_details_page.INCLUDED)
649
650 elif next_step == self.PACKAGE_SELECTION:
651 self.configuration.initial_selected_packages = self.configuration.selected_packages
652 self.configuration.initial_user_selected_packages = self.configuration.user_selected_packages
653 self.package_details_page.set_title("Edit packages")
654 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
655 self.package_details_page.set_packages_curr_tab(self.package_details_page.ALL)
656 else:
657 self.package_details_page.set_packages_curr_tab(self.package_details_page.INCLUDED)
658 self.package_details_page.show_page(self.current_logfile)
659
660
661 elif next_step == self.PACKAGE_GENERATING or next_step == self.FAST_IMAGE_GENERATING:
662 # both PACKAGE_GENERATING and FAST_IMAGE_GENERATING share the same page
663 self.build_details_page.show_page(next_step)
664
665 elif next_step == self.PACKAGE_GENERATED:
666 self.package_details_page.set_title("Step 2 of 2: Edit packages")
667 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
668 self.package_details_page.set_packages_curr_tab(self.package_details_page.ALL)
669 else:
670 self.package_details_page.set_packages_curr_tab(self.package_details_page.INCLUDED)
671 self.package_details_page.show_page(self.current_logfile)
672
673 elif next_step == self.IMAGE_GENERATING:
674 # after packages are generated, selected_packages need to
675 # be updated in package_model per selected_image in recipe_model
676 self.build_details_page.show_page(next_step)
677
678 elif next_step == self.IMAGE_GENERATED:
679 self.image_details_page.show_page(next_step)
680
681 elif next_step == self.MY_IMAGE_OPENED:
682 self.image_details_page.show_page(next_step)
683
684 self.previous_step = self.current_step
685 self.current_step = next_step
686
687 def set_user_config_proxies(self):
688 if self.configuration.enable_proxy == True:
689 self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
690 self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
691 self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
692 self.handler.set_socks_proxy(self.configuration.combine_proxy("socks"))
693 self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
694 elif self.configuration.enable_proxy == False:
695 self.handler.set_http_proxy("")
696 self.handler.set_https_proxy("")
697 self.handler.set_ftp_proxy("")
698 self.handler.set_socks_proxy("")
699 self.handler.set_cvs_proxy("", "")
700
701 def set_user_config_extra(self):
702 self.handler.set_rootfs_size(self.configuration.image_rootfs_size)
703 self.handler.set_extra_size(self.configuration.image_extra_size)
704 self.handler.set_incompatible_license(self.configuration.incompat_license)
705 self.handler.set_sdk_machine(self.configuration.curr_sdk_machine)
706 self.handler.set_image_fstypes(self.configuration.image_fstypes)
707 self.handler.set_extra_config(self.configuration.extra_setting)
708 self.handler.set_extra_inherit("packageinfo image_types")
709 self.set_user_config_proxies()
710
711 def set_user_config(self):
712 # set bb layers
713 self.handler.set_bblayers(self.configuration.layers)
714 # set local configuration
715 self.handler.set_machine(self.configuration.curr_mach)
716 self.handler.set_package_format(self.configuration.curr_package_format)
717 self.handler.set_distro(self.configuration.curr_distro)
718 self.handler.set_dl_dir(self.configuration.dldir)
719 self.handler.set_sstate_dir(self.configuration.sstatedir)
720 self.handler.set_sstate_mirrors(self.configuration.sstatemirror)
721 self.handler.set_pmake(self.configuration.pmake)
722 self.handler.set_bbthreads(self.configuration.bbthread)
723 self.set_user_config_extra()
724
725 def update_recipe_model(self, selected_image, selected_recipes):
726 self.recipe_model.set_selected_image(selected_image)
727 self.recipe_model.set_selected_recipes(selected_recipes)
728
729 def update_package_model(self, selected_packages, user_selected_packages=None):
730 if user_selected_packages:
731 left = self.package_model.set_selected_packages(user_selected_packages, True)
732 self.configuration.user_selected_packages += left
733 left = self.package_model.set_selected_packages(selected_packages)
734 self.configuration.selected_packages += left
735
736 def update_configuration_parameters(self, params):
737 if params:
738 self.configuration.update(params)
739 self.parameters.update(params)
740
741 def set_base_image(self):
742 self.configuration.initial_selected_image = self.configuration.selected_image
743 if self.configuration.selected_image != self.recipe_model.__custom_image__:
744 self.hob_image = self.configuration.selected_image + "-edited"
745
746 def reset(self):
747 self.configuration.curr_mach = ""
748 self.configuration.clear_selection()
749 self.image_configuration_page.switch_machine_combo()
750 self.switch_page(self.MACHINE_SELECTION)
751
752 # Callback Functions
753 def handler_config_updated_cb(self, handler, which, values):
754 if which == "distro":
755 self.parameters.all_distros = values
756 elif which == "machine":
757 self.parameters.all_machines = values
758 self.image_configuration_page.update_machine_combo()
759 elif which == "machine-sdk":
760 self.parameters.all_sdk_machines = values
761
762 def handler_package_formats_updated_cb(self, handler, formats):
763 self.parameters.all_package_formats = formats
764
765 def switch_to_image_configuration_helper(self):
766 self.sanity_check_page.stop()
767 self.switch_page(self.IMAGE_CONFIGURATION)
768 self.image_configuration_page.switch_machine_combo()
769
770 def show_network_error_dialog_helper(self):
771 self.sanity_check_page.stop()
772 self.show_network_error_dialog()
773
774 def handler_command_succeeded_cb(self, handler, initcmd):
775 if initcmd == self.handler.GENERATE_CONFIGURATION:
776 if not self.configuration.curr_mach:
777 self.configuration.curr_mach = self.handler.runCommand(["getVariable", "HOB_MACHINE"]) or ""
778 self.update_configuration_parameters(self.get_parameters_sync())
779 if not self.sanity_checked:
780 self.sanity_check()
781 self.sanity_checked = True
782 elif initcmd == self.handler.SANITY_CHECK:
783 if self.had_network_error:
784 self.had_network_error = False
785 self.execute_after_sanity_check(self.show_network_error_dialog_helper)
786 else:
787 # Switch to the 'image configuration' page now, but we might need
788 # to wait for the minimum display time of the sanity check page
789 self.execute_after_sanity_check(self.switch_to_image_configuration_helper)
790 elif initcmd in [self.handler.GENERATE_RECIPES,
791 self.handler.GENERATE_PACKAGES,
792 self.handler.GENERATE_IMAGE]:
793 self.update_configuration_parameters(self.get_parameters_sync())
794 self.request_package_info_async()
795 elif initcmd == self.handler.POPULATE_PACKAGEINFO:
796 if self.current_step == self.RCPPKGINFO_POPULATING:
797 self.switch_page(self.RCPPKGINFO_POPULATED)
798 self.rcppkglist_populated()
799 return
800
801 self.rcppkglist_populated()
802 if self.current_step == self.FAST_IMAGE_GENERATING:
803 self.generate_image_async(True)
804
805 def show_error_dialog(self, msg):
806 lbl = "<b>Hob found an error</b>"
807 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
808 button = dialog.add_button("Close", gtk.RESPONSE_OK)
809 HobButton.style_button(button)
810 response = dialog.run()
811 dialog.destroy()
812
813 def show_warning_dialog(self):
814 dialog = ParsingWarningsDialog(title = "View warnings",
815 warnings = self.parsing_warnings,
816 parent = None,
817 flags = gtk.DIALOG_DESTROY_WITH_PARENT
818 | gtk.DIALOG_NO_SEPARATOR)
819 response = dialog.run()
820 dialog.destroy()
821
822 def show_network_error_dialog(self):
823 lbl = "<b>Hob cannot connect to the network</b>"
824 msg = "Please check your network connection. If you are using a proxy server, please make sure it is configured correctly."
825 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
826 button = dialog.add_button("Close", gtk.RESPONSE_OK)
827 HobButton.style_button(button)
828 button = dialog.add_button("Proxy settings", gtk.RESPONSE_CANCEL)
829 HobButton.style_button(button)
830 res = dialog.run()
831 dialog.destroy()
832 if res == gtk.RESPONSE_CANCEL:
833 res, settings_changed = self.show_simple_settings_dialog(SimpleSettingsDialog.PROXIES_PAGE_ID)
834 if not res:
835 return
836 if settings_changed:
837 self.reparse_post_adv_settings()
838
839 def handler_command_failed_cb(self, handler, msg):
840 if msg:
841 self.show_error_dialog(msg)
842 self.reset()
843
844 def handler_parsing_warning_cb(self, handler, warn_msg):
845 self.parsing_warnings.append(warn_msg)
846
847 def handler_sanity_failed_cb(self, handler, msg, network_error):
848 self.reset()
849 if network_error:
850 # Mark this in an internal field. The "network error" dialog will be
851 # shown later, when a SanityCheckPassed event is handled
852 # (as sent by sanity.bbclass)
853 self.had_network_error = True
854 else:
855 msg = msg.replace("your local.conf", "Settings")
856 self.show_error_dialog(msg)
857 self.reset()
858
859 def window_sensitive(self, sensitive):
860 self.image_configuration_page.machine_combo.set_sensitive(sensitive)
861 self.image_configuration_page.machine_combo.child.set_sensitive(sensitive)
862 self.image_configuration_page.image_combo.set_sensitive(sensitive)
863 self.image_configuration_page.image_combo.child.set_sensitive(sensitive)
864 self.image_configuration_page.layer_button.set_sensitive(sensitive)
865 self.image_configuration_page.layer_info_icon.set_sensitive(sensitive)
866 self.image_configuration_page.toolbar.set_sensitive(sensitive)
867 self.image_configuration_page.view_adv_configuration_button.set_sensitive(sensitive)
868 self.image_configuration_page.config_build_button.set_sensitive(sensitive)
869
870 self.recipe_details_page.set_sensitive(sensitive)
871 self.package_details_page.set_sensitive(sensitive)
872 self.build_details_page.set_sensitive(sensitive)
873 self.image_details_page.set_sensitive(sensitive)
874
875 if sensitive:
876 self.window.set_cursor(None)
877 else:
878 self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
879 self.sensitive = sensitive
880
881
882 def handler_generating_data_cb(self, handler):
883 self.window_sensitive(False)
884
885 def handler_data_generated_cb(self, handler):
886 self.window_sensitive(True)
887
888 def rcppkglist_populated(self):
889 selected_image = self.configuration.selected_image
890 selected_recipes = self.configuration.selected_recipes[:]
891 selected_packages = self.configuration.selected_packages[:]
892 user_selected_packages = self.configuration.user_selected_packages[:]
893
894 self.image_configuration_page.update_image_combo(self.recipe_model, selected_image)
895 self.image_configuration_page.update_image_desc()
896 self.update_recipe_model(selected_image, selected_recipes)
897 self.update_package_model(selected_packages, user_selected_packages)
898
899 def recipelist_changed_cb(self, recipe_model):
900 self.recipe_details_page.refresh_selection()
901
902 def packagelist_changed_cb(self, package_model):
903 self.package_details_page.refresh_selection()
904
905 def handler_recipe_populated_cb(self, handler):
906 self.image_configuration_page.update_progress_bar("Populating recipes", 0.99)
907
908 def handler_package_populated_cb(self, handler):
909 self.image_configuration_page.update_progress_bar("Populating packages", 1.0)
910
911 def handler_parsing_started_cb(self, handler, message):
912 if self.current_step != self.RCPPKGINFO_POPULATING:
913 return
914
915 fraction = 0
916 if message["eventname"] == "TreeDataPreparationStarted":
917 fraction = 0.6 + fraction
918 self.image_configuration_page.stop_button.set_sensitive(False)
919 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
920 else:
921 self.image_configuration_page.stop_button.set_sensitive(True)
922 self.image_configuration_page.update_progress_bar(message["title"], fraction)
923
924 def handler_parsing_cb(self, handler, message):
925 if self.current_step != self.RCPPKGINFO_POPULATING:
926 return
927
928 fraction = message["current"] * 1.0/message["total"]
929 if message["eventname"] == "TreeDataPreparationProgress":
930 fraction = 0.6 + 0.38 * fraction
931 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
932 else:
933 fraction = 0.6 * fraction
934 self.image_configuration_page.update_progress_bar(message["title"], fraction)
935
936 def handler_parsing_completed_cb(self, handler, message):
937 if self.current_step != self.RCPPKGINFO_POPULATING:
938 return
939
940 if message["eventname"] == "TreeDataPreparationCompleted":
941 fraction = 0.98
942 else:
943 fraction = 0.6
944 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
945
946 def handler_build_started_cb(self, running_build):
947 if self.current_step == self.FAST_IMAGE_GENERATING:
948 fraction = 0
949 elif self.current_step == self.IMAGE_GENERATING:
950 if self.previous_step == self.FAST_IMAGE_GENERATING:
951 fraction = 0.9
952 else:
953 fraction = 0
954 elif self.current_step == self.PACKAGE_GENERATING:
955 fraction = 0
956 self.build_details_page.update_progress_bar("Build Started: ", fraction)
957 self.build_details_page.show_configurations(self.configuration, self.parameters)
958
959 def build_succeeded(self):
960 if self.current_step == self.FAST_IMAGE_GENERATING:
961 fraction = 0.9
962 elif self.current_step == self.IMAGE_GENERATING:
963 fraction = 1.0
964 version = ""
965 self.parameters.image_names = []
966 selected_image = self.recipe_model.get_selected_image()
967 if selected_image == self.recipe_model.__custom_image__:
968 if self.configuration.initial_selected_image != selected_image:
969 version = self.recipe_model.get_custom_image_version()
970 linkname = self.hob_image + version + "-" + self.configuration.curr_mach
971 else:
972 linkname = selected_image + '-' + self.configuration.curr_mach
973 image_extension = self.get_image_extension()
974 for image_type in self.parameters.image_types:
975 if image_type in image_extension:
976 real_types = image_extension[image_type]
977 else:
978 real_types = [image_type]
979 for real_image_type in real_types:
980 linkpath = self.parameters.image_addr + '/' + linkname + '.' + real_image_type
981 if os.path.exists(linkpath):
982 self.parameters.image_names.append(os.readlink(linkpath))
983 elif self.current_step == self.PACKAGE_GENERATING:
984 fraction = 1.0
985 self.build_details_page.update_progress_bar("Build Completed: ", fraction)
986 self.handler.build_succeeded_async()
987 self.stopping = False
988
989 if self.current_step == self.PACKAGE_GENERATING:
990 self.switch_page(self.PACKAGE_GENERATED)
991 elif self.current_step == self.IMAGE_GENERATING:
992 self.switch_page(self.IMAGE_GENERATED)
993
994 def build_failed(self):
995 if self.stopping:
996 status = "stop"
997 message = "Build stopped: "
998 fraction = self.build_details_page.progress_bar.get_fraction()
999 stop_to_next_edit = ""
1000 if self.current_step == self.FAST_IMAGE_GENERATING:
1001 stop_to_next_edit = "image configuration"
1002 elif self.current_step == self.IMAGE_GENERATING:
1003 if self.previous_step == self.FAST_IMAGE_GENERATING:
1004 stop_to_next_edit = "image configuration"
1005 else:
1006 stop_to_next_edit = "packages"
1007 elif self.current_step == self.PACKAGE_GENERATING:
1008 stop_to_next_edit = "recipes"
1009 button = self.build_details_page.show_stop_page(stop_to_next_edit.split(' ')[0])
1010 self.set_default(button)
1011 else:
1012 fail_to_next_edit = ""
1013 if self.current_step == self.FAST_IMAGE_GENERATING:
1014 fail_to_next_edit = "image configuration"
1015 fraction = 0.9
1016 elif self.current_step == self.IMAGE_GENERATING:
1017 if self.previous_step == self.FAST_IMAGE_GENERATING:
1018 fail_to_next_edit = "image configuration"
1019 else:
1020 fail_to_next_edit = "packages"
1021 fraction = 1.0
1022 elif self.current_step == self.PACKAGE_GENERATING:
1023 fail_to_next_edit = "recipes"
1024 fraction = 1.0
1025 self.build_details_page.show_fail_page(fail_to_next_edit.split(' ')[0])
1026 status = "fail"
1027 message = "Build failed: "
1028 self.build_details_page.update_progress_bar(message, fraction, status)
1029 self.build_details_page.show_back_button()
1030 self.build_details_page.hide_stop_button()
1031 self.handler.build_failed_async()
1032 self.stopping = False
1033
1034 def handler_build_succeeded_cb(self, running_build):
1035 if not self.stopping:
1036 self.build_succeeded()
1037 else:
1038 self.build_failed()
1039
1040
1041 def handler_build_failed_cb(self, running_build):
1042 self.build_failed()
1043
1044 def handler_build_aborted_cb(self, running_build):
1045 self.build_failed()
1046
1047 def handler_no_provider_cb(self, running_build, msg):
1048 dialog = CrumbsMessageDialog(self, glib.markup_escape_text(msg), gtk.MESSAGE_INFO)
1049 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1050 HobButton.style_button(button)
1051 dialog.run()
1052 dialog.destroy()
1053 self.build_failed()
1054
1055 def handler_task_started_cb(self, running_build, message):
1056 fraction = message["current"] * 1.0/message["total"]
1057 title = "Build packages"
1058 if self.current_step == self.FAST_IMAGE_GENERATING:
1059 if message["eventname"] == "sceneQueueTaskStarted":
1060 fraction = 0.27 * fraction
1061 elif message["eventname"] == "runQueueTaskStarted":
1062 fraction = 0.27 + 0.63 * fraction
1063 elif self.current_step == self.IMAGE_GENERATING:
1064 title = "Build image"
1065 if self.previous_step == self.FAST_IMAGE_GENERATING:
1066 if message["eventname"] == "sceneQueueTaskStarted":
1067 fraction = 0.27 + 0.63 + 0.03 * fraction
1068 elif message["eventname"] == "runQueueTaskStarted":
1069 fraction = 0.27 + 0.63 + 0.03 + 0.07 * fraction
1070 else:
1071 if message["eventname"] == "sceneQueueTaskStarted":
1072 fraction = 0.2 * fraction
1073 elif message["eventname"] == "runQueueTaskStarted":
1074 fraction = 0.2 + 0.8 * fraction
1075 elif self.current_step == self.PACKAGE_GENERATING:
1076 if message["eventname"] == "sceneQueueTaskStarted":
1077 fraction = 0.2 * fraction
1078 elif message["eventname"] == "runQueueTaskStarted":
1079 fraction = 0.2 + 0.8 * fraction
1080 self.build_details_page.update_progress_bar(title + ": ", fraction)
1081 self.build_details_page.update_build_status(message["current"], message["total"], message["task"])
1082
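A worked example of the progress weighting above: during FAST_IMAGE_GENERATING, a runQueueTaskStarted event at task 50 of 100 yields a fraction of 0.585, i.e. the scene queue is budgeted the first 27% of the bar and the run queue the following 63%, with the remaining 10% reserved for the image-generation stage handled in the IMAGE_GENERATING branch.

    fraction = 50 * 1.0 / 100            # message["current"] / message["total"]
    fraction = 0.27 + 0.63 * fraction    # runQueueTaskStarted while FAST_IMAGE_GENERATING
    assert abs(fraction - 0.585) < 1e-9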
1083 def handler_disk_full_cb(self, running_build):
1084 self.disk_full = True
1085
1086 def handler_build_failure_cb(self, running_build):
1087 self.build_details_page.show_issues()
1088
1089 def handler_build_log_cb(self, running_build, func, obj):
1090 if hasattr(self.logger, func):
1091 getattr(self.logger, func)(obj)
1092
1093 def destroy_window_cb(self, widget, event):
1094 if not self.sensitive:
1095 return True
1096 elif self.handler.building:
1097 self.stop_build()
1098 return True
1099 else:
1100 gtk.main_quit()
1101
1102 def event_handle_SIGINT(self, signal, frame):
1103 for w in gtk.window_list_toplevels():
1104 if w.get_modal():
1105 w.response(gtk.RESPONSE_DELETE_EVENT)
1106 sys.exit(0)
1107
1108 def build_packages(self):
1109 _, all_recipes = self.recipe_model.get_selected_recipes()
1110 if not all_recipes:
1111 lbl = "<b>No selections made</b>"
1112 msg = "You have not made any selections"
1113 msg = msg + " so there isn't anything to bake at this time."
1114 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1115 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1116 HobButton.style_button(button)
1117 dialog.run()
1118 dialog.destroy()
1119 return
1120 self.generate_packages_async(True)
1121
1122 def build_image(self):
1123 selected_packages = self.package_model.get_selected_packages()
1124 if not selected_packages:
1125 lbl = "<b>No selections made</b>"
1126 msg = "You have not made any selections"
1127 msg = msg + " so there isn't anything to bake at this time."
1128 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1129 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1130 HobButton.style_button(button)
1131 dialog.run()
1132 dialog.destroy()
1133 return
1134 self.generate_image_async(True)
1135
1136 def just_bake(self):
1137 selected_image = self.recipe_model.get_selected_image()
1138 selected_packages = self.package_model.get_selected_packages() or []
1139
1140 # If no base image and no selected packages don't build anything
1141 if not (selected_packages or selected_image != self.recipe_model.__custom_image__):
1142 lbl = "<b>No selections made</b>"
1143 msg = "You have not made any selections"
1144 msg = msg + " so there isn't anything to bake at this time."
1145 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1146 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1147 HobButton.style_button(button)
1148 dialog.run()
1149 dialog.destroy()
1150 return
1151
1152 self.fast_generate_image_async(True)
1153
1154 def show_recipe_property_dialog(self, properties):
1155 information = {}
1156 dialog = PropertyDialog(title = properties["name"] +' '+ "properties",
1157 parent = self,
1158 information = properties,
1159 flags = gtk.DIALOG_DESTROY_WITH_PARENT
1160 | gtk.DIALOG_NO_SEPARATOR)
1161
1162 dialog.set_modal(False)
1163
1164 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1165 HobAltButton.style_button(button)
1166 button.connect("clicked", lambda w: dialog.destroy())
1167
1168 dialog.run()
1169
1170 def show_packages_property_dialog(self, properties):
1171 information = {}
1172 dialog = PropertyDialog(title = properties["name"] +' '+ "properties",
1173 parent = self,
1174 information = properties,
1175 flags = gtk.DIALOG_DESTROY_WITH_PARENT
1176 | gtk.DIALOG_NO_SEPARATOR)
1177
1178 dialog.set_modal(False)
1179
1180 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1181 HobAltButton.style_button(button)
1182 button.connect("clicked", lambda w: dialog.destroy())
1183
1184 dialog.run()
1185
1186 def show_layer_selection_dialog(self):
1187 dialog = LayerSelectionDialog(title = "Layers",
1188 layers = copy.deepcopy(self.configuration.layers),
1189 layers_non_removable = copy.deepcopy(self.configuration.layers_non_removable),
1190 all_layers = self.parameters.all_layers,
1191 parent = self,
1192 flags = gtk.DIALOG_MODAL
1193 | gtk.DIALOG_DESTROY_WITH_PARENT
1194 | gtk.DIALOG_NO_SEPARATOR)
1195 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1196 HobAltButton.style_button(button)
1197 button = dialog.add_button("OK", gtk.RESPONSE_YES)
1198 HobButton.style_button(button)
1199 response = dialog.run()
1200 if response == gtk.RESPONSE_YES:
1201 self.configuration.layers = dialog.layers
1202 # DO refresh layers
1203 if dialog.layers_changed:
1204 self.update_config_async()
1205 dialog.destroy()
1206
1207 def get_image_extension(self):
1208 image_extension = {}
1209 for type in self.parameters.image_types:
1210 ext = self.handler.runCommand(["getVariable", "IMAGE_EXTENSION_%s" % type])
1211 if ext:
1212 image_extension[type] = ext.split(' ')
1213
1214 return image_extension
1215
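For example, if IMAGE_EXTENSION_live were set to "hddimg iso" (a hypothetical value), get_image_extension() would return {"live": ["hddimg", "iso"]}, and build_succeeded() above would then look for <linkname>.hddimg and <linkname>.iso rather than <linkname>.live.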
1216 def show_load_my_images_dialog(self):
1217 image_extension = self.get_image_extension()
1218 dialog = ImageSelectionDialog(self.parameters.image_addr, self.parameters.image_types,
1219 "Open My Images", self,
1220 gtk.FILE_CHOOSER_ACTION_SAVE, None,
1221 image_extension)
1222 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1223 HobAltButton.style_button(button)
1224 button = dialog.add_button("Open", gtk.RESPONSE_YES)
1225 HobButton.style_button(button)
1226 response = dialog.run()
1227 if response == gtk.RESPONSE_YES:
1228 if not dialog.image_names:
1229 lbl = "<b>No selections made</b>"
1230 msg = "You have not made any selections"
1231 crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1232 button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
1233 HobButton.style_button(button)
1234 crumbs_dialog.run()
1235 crumbs_dialog.destroy()
1236 dialog.destroy()
1237 return
1238
1239 self.parameters.image_addr = dialog.image_folder
1240 self.parameters.image_names = dialog.image_names[:]
1241 self.switch_page(self.MY_IMAGE_OPENED)
1242
1243 dialog.destroy()
1244
1245 def show_adv_settings_dialog(self, tab=None):
1246 dialog = AdvancedSettingsDialog(title = "Advanced configuration",
1247 configuration = copy.deepcopy(self.configuration),
1248 all_image_types = self.parameters.image_types,
1249 all_package_formats = self.parameters.all_package_formats,
1250 all_distros = self.parameters.all_distros,
1251 all_sdk_machines = self.parameters.all_sdk_machines,
1252 max_threads = self.parameters.max_threads,
1253 parent = self,
1254 flags = gtk.DIALOG_MODAL
1255 | gtk.DIALOG_DESTROY_WITH_PARENT
1256 | gtk.DIALOG_NO_SEPARATOR)
1257 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1258 HobAltButton.style_button(button)
1259 button = dialog.add_button("Save", gtk.RESPONSE_YES)
1260 HobButton.style_button(button)
1261 dialog.set_save_button(button)
1262 response = dialog.run()
1263 settings_changed = False
1264 if response == gtk.RESPONSE_YES:
1265 self.configuration = dialog.configuration
1266 self.configuration.save(self.handler, True) # remember settings
1267 settings_changed = dialog.settings_changed
1268 dialog.destroy()
1269 return response == gtk.RESPONSE_YES, settings_changed
1270
1271 def show_simple_settings_dialog(self, tab=None):
1272 dialog = SimpleSettingsDialog(title = "Settings",
1273 configuration = copy.deepcopy(self.configuration),
1274 all_image_types = self.parameters.image_types,
1275 all_package_formats = self.parameters.all_package_formats,
1276 all_distros = self.parameters.all_distros,
1277 all_sdk_machines = self.parameters.all_sdk_machines,
1278 max_threads = self.parameters.max_threads,
1279 parent = self,
1280 flags = gtk.DIALOG_MODAL
1281 | gtk.DIALOG_DESTROY_WITH_PARENT
1282 | gtk.DIALOG_NO_SEPARATOR,
1283 handler = self.handler)
1284 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1285 HobAltButton.style_button(button)
1286 button = dialog.add_button("Save", gtk.RESPONSE_YES)
1287 HobButton.style_button(button)
1288 if tab:
1289 dialog.switch_to_page(tab)
1290 response = dialog.run()
1291 settings_changed = False
1292 if response == gtk.RESPONSE_YES:
1293 self.configuration = dialog.configuration
1294 self.configuration.save(self.handler, True) # remember settings
1295 settings_changed = dialog.settings_changed
1296 if dialog.proxy_settings_changed:
1297 self.set_user_config_proxies()
1298 elif dialog.proxy_test_ran:
1299 # The user might have modified the proxies in the "Proxy"
1300 # tab, which in turn changed the proxy settings inside bb.
1301 # If "Cancel" was pressed, restore the previous proxy
1302 # settings inside bb.
1303 self.set_user_config_proxies()
1304 dialog.destroy()
1305 return response == gtk.RESPONSE_YES, settings_changed
1306
1307 def reparse_post_adv_settings(self):
1308 if not self.configuration.curr_mach:
1309 self.update_config_async()
1310 else:
1311 self.configuration.clear_selection()
1312 # DO reparse recipes
1313 self.populate_recipe_package_info_async()
1314
1315 def deploy_image(self, image_name):
1316 if not image_name:
1317 lbl = "<b>Please select an image to deploy.</b>"
1318 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO)
1319 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1320 HobButton.style_button(button)
1321 dialog.run()
1322 dialog.destroy()
1323 return
1324
1325 image_path = os.path.join(self.parameters.image_addr, image_name)
1326 dialog = DeployImageDialog(title = "Usb Image Maker",
1327 image_path = image_path,
1328 parent = self,
1329 flags = gtk.DIALOG_MODAL
1330 | gtk.DIALOG_DESTROY_WITH_PARENT
1331 | gtk.DIALOG_NO_SEPARATOR)
1332 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1333 HobAltButton.style_button(button)
1334 button = dialog.add_button("Make usb image", gtk.RESPONSE_YES)
1335 HobButton.style_button(button)
1336 response = dialog.run()
1337 dialog.destroy()
1338
1339 def show_load_kernel_dialog(self):
1340 dialog = gtk.FileChooserDialog("Load Kernel Files", self,
1341 gtk.FILE_CHOOSER_ACTION_SAVE)
1342 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1343 HobAltButton.style_button(button)
1344 button = dialog.add_button("Open", gtk.RESPONSE_YES)
1345 HobButton.style_button(button)
1346 filter = gtk.FileFilter()
1347 filter.set_name("Kernel Files")
1348 filter.add_pattern("*.bin")
1349 dialog.add_filter(filter)
1350
1351 dialog.set_current_folder(self.parameters.image_addr)
1352
1353 response = dialog.run()
1354 kernel_path = ""
1355 if response == gtk.RESPONSE_YES:
1356 kernel_path = dialog.get_filename()
1357
1358 dialog.destroy()
1359
1360 return kernel_path
1361
1362 def runqemu_image(self, image_name, kernel_name):
1363 if not image_name or not kernel_name:
1364 lbl = "<b>Please select %s to launch in QEMU.</b>" % ("a kernel" if image_name else "an image")
1365 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO)
1366 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1367 HobButton.style_button(button)
1368 dialog.run()
1369 dialog.destroy()
1370 return
1371
1372 kernel_path = os.path.join(self.parameters.image_addr, kernel_name)
1373 image_path = os.path.join(self.parameters.image_addr, image_name)
1374
1375 source_env_path = os.path.join(self.parameters.core_base, "oe-init-build-env")
1376 tmp_path = self.parameters.tmpdir
1377 cmdline = bb.ui.crumbs.utils.which_terminal()
1378 if os.path.exists(image_path) and os.path.exists(kernel_path) \
1379 and os.path.exists(source_env_path) and os.path.exists(tmp_path) \
1380 and cmdline:
1381 cmdline += "\' bash -c \"export OE_TMPDIR=" + tmp_path + "; "
1382 cmdline += "source " + source_env_path + " " + os.getcwd() + "; "
1383 cmdline += "runqemu " + kernel_path + " " + image_path + "\"\'"
1384 subprocess.Popen(shlex.split(cmdline))
1385 else:
1386 lbl = "<b>Path error</b>"
1387 msg = "One of your paths is wrong,"
1388 msg = msg + " please make sure the following paths exist:\n"
1389 msg = msg + "image path:" + image_path + "\n"
1390 msg = msg + "kernel path:" + kernel_path + "\n"
1391 msg = msg + "source environment path:" + source_env_path + "\n"
1392 msg = msg + "tmp path: " + tmp_path + "."
1393 msg = msg + "\nYou may be missing either xterm or vte for terminal services."
1394 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
1395 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1396 HobButton.style_button(button)
1397 dialog.run()
1398 dialog.destroy()
1399
1400 def show_packages(self):
1401 self.package_details_page.refresh_tables()
1402 self.switch_page(self.PACKAGE_SELECTION)
1403
1404 def show_recipes(self):
1405 self.switch_page(self.RECIPE_SELECTION)
1406
1407 def show_image_details(self):
1408 self.switch_page(self.IMAGE_GENERATED)
1409
1410 def show_configuration(self):
1411 self.switch_page(self.BASEIMG_SELECTED)
1412
1413 def stop_build(self):
1414 if self.stopping:
1415 lbl = "<b>Force Stop build?</b>"
1416 msg = "You've already selected Stop once,"
1417 msg = msg + " would you like to 'Force Stop' the build?\n\n"
1418 msg = msg + "This will stop the build as quickly as possible but may"
1419 msg = msg + " well leave your build directory in an unusable state"
1420 msg = msg + " that requires manual steps to fix."
1421 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
1422 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
1423 HobAltButton.style_button(button)
1424 button = dialog.add_button("Force Stop", gtk.RESPONSE_YES)
1425 HobButton.style_button(button)
1426 else:
1427 lbl = "<b>Stop build?</b>"
1428 msg = "Are you sure you want to stop this"
1429 msg = msg + " build?\n\n'Stop' will stop the build as soon as all in"
1430 msg = msg + " progress build tasks are finished. However if a"
1431 msg = msg + " lengthy compilation phase is in progress this may take"
1432 msg = msg + " some time.\n\n"
1433 msg = msg + "'Force Stop' will stop the build as quickly as"
1434 msg = msg + " possible but may well leave your build directory in an"
1435 msg = msg + " unusable state that requires manual steps to fix."
1436 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
1437 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
1438 HobAltButton.style_button(button)
1439 button = dialog.add_button("Force stop", gtk.RESPONSE_YES)
1440 HobAltButton.style_button(button)
1441 button = dialog.add_button("Stop", gtk.RESPONSE_OK)
1442 HobButton.style_button(button)
1443 response = dialog.run()
1444 dialog.destroy()
1445 if response != gtk.RESPONSE_CANCEL:
1446 self.stopping = True
1447 if response == gtk.RESPONSE_OK:
1448 self.build_details_page.progress_bar.set_stop_title("Stopping the build....")
1449 self.build_details_page.progress_bar.set_rcstyle("stop")
1450 self.cancel_build_sync()
1451 elif response == gtk.RESPONSE_YES:
1452 self.cancel_build_sync(True)
1453
1454 def do_log(self, consolelogfile = None):
1455 if consolelogfile:
1456 bb.utils.mkdirhier(os.path.dirname(consolelogfile))
1457 if self.consolelog:
1458 self.logger.removeHandler(self.consolelog)
1459 self.consolelog = None
1460 self.consolelog = logging.FileHandler(consolelogfile)
1461 bb.msg.addDefaultlogFilter(self.consolelog)
1462 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
1463 self.consolelog.setFormatter(format)
1464
1465 self.logger.addHandler(self.consolelog)
1466
1467 def get_topdir(self):
1468 return self.handler.get_topdir()
1469
1470 def wait(self, delay):
1471 time_start = time.time()
1472 time_end = time_start + delay
1473 while time_end > time.time():
1474 while gtk.events_pending():
1475 gtk.main_iteration()
diff --git a/bitbake/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
new file mode 100644
index 0000000000..e858d75e4c
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/buildmanager.py
@@ -0,0 +1,455 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23import threading
24import os
25import datetime
26import time
27
28class BuildConfiguration:
29 """ Represents a potential *or* historic *or* concrete build. It
30 encompasses all the things that we need to tell bitbake to do to make it
31 build what we want it to build.
32
33 It also stored the metadata URL and the set of possible machines (and the
34 distros / images / uris for these. Apart from the metdata URL these are
35 not serialised to file (since they may be transient). In some ways this
36 functionality might be shifted to the loader class."""
37
38 def __init__ (self):
39 self.metadata_url = None
40
41 # Tuple of (distros, image, urls)
42 self.machine_options = {}
43
44 self.machine = None
45 self.distro = None
46 self.image = None
47 self.urls = []
48 self.extra_urls = []
49 self.extra_pkgs = []
50
51 def get_machines_model (self):
52 model = gtk.ListStore (gobject.TYPE_STRING)
53 for machine in self.machine_options.keys():
54 model.append ([machine])
55
56 return model
57
58 def get_distro_and_images_models (self, machine):
59 distro_model = gtk.ListStore (gobject.TYPE_STRING)
60
61 for distro in self.machine_options[machine][0]:
62 distro_model.append ([distro])
63
64 image_model = gtk.ListStore (gobject.TYPE_STRING)
65
66 for image in self.machine_options[machine][1]:
67 image_model.append ([image])
68
69 return (distro_model, image_model)
70
71 def get_repos (self):
72 self.urls = self.machine_options[self.machine][2]
73 return self.urls
74
75 # It might be a lot better if we stored these in something like the
76 # bitbake conf file format.
77 @staticmethod
78 def load_from_file (filename):
79
80 conf = BuildConfiguration()
81 with open(filename, "r") as f:
82 for line in f:
83 data = line.split (";")[1]
84 if (line.startswith ("metadata-url;")):
85 conf.metadata_url = data.strip()
86 continue
87 if (line.startswith ("url;")):
88 conf.urls += [data.strip()]
89 continue
90 if (line.startswith ("extra-url;")):
91 conf.extra_urls += [data.strip()]
92 continue
93 if (line.startswith ("machine;")):
94 conf.machine = data.strip()
95 continue
96 if (line.startswith ("distribution;")):
97 conf.distro = data.strip()
98 continue
99 if (line.startswith ("image;")):
100 conf.image = data.strip()
101 continue
102
103 return conf
104
105 # Serialise to a file. This is part of the build process and we use this
106 # to be able to repeat a given build (using the same set of parameters)
107 # but also so that we can include the details of the image / machine /
108 # distro in the build manager tree view.
109 def write_to_file (self, filename):
110 f = open (filename, "w")
111
112 lines = []
113
114 if (self.metadata_url):
115 lines += ["metadata-url;%s\n" % (self.metadata_url)]
116
117 for url in self.urls:
118 lines += ["url;%s\n" % (url)]
119
120 for url in self.extra_urls:
121 lines += ["extra-url;%s\n" % (url)]
122
123 if (self.machine):
124 lines += ["machine;%s\n" % (self.machine)]
125
126 if (self.distro):
127 lines += ["distribution;%s\n" % (self.distro)]
128
129 if (self.image):
130 lines += ["image;%s\n" % (self.image)]
131
132 f.writelines (lines)
133 f.close ()
134
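For illustration, a minimal sketch of the round trip through the simple "key;value" line format used by write_to_file()/load_from_file(). The paths, URLs, and values below are hypothetical.

    # Hypothetical values -- purely illustrative.
    conf = BuildConfiguration()
    conf.metadata_url = "http://git.example.com/metadata"
    conf.urls = ["http://git.example.com/layer1", "http://git.example.com/layer2"]
    conf.machine = "qemux86"
    conf.distro = "poky"
    conf.image = "core-image-minimal"
    conf.write_to_file("/tmp/example.conf")
    # /tmp/example.conf now contains one entry per line, e.g.:
    #   metadata-url;http://git.example.com/metadata
    #   url;http://git.example.com/layer1
    #   url;http://git.example.com/layer2
    #   machine;qemux86
    #   distribution;poky
    #   image;core-image-minimal
    loaded = BuildConfiguration.load_from_file("/tmp/example.conf")
    assert loaded.machine == "qemux86" and loaded.image == "core-image-minimal"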
135class BuildResult(gobject.GObject):
136 """ Represents an historic build. Perhaps not successful. But it includes
137 things such as the files that are in the directory (the output from the
138 build) as well as a BuildConfiguration deserialised from the ".conf" file
139 stored in the directory for the build.
140
141 This is GObject so that it can be included in the TreeStore."""
142
143 (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
144 (0, 1, 2)
145
146 def __init__ (self, parent, identifier):
147 gobject.GObject.__init__ (self)
148 self.date = None
149
150 self.files = []
151 self.status = None
152 self.identifier = identifier
153 self.path = os.path.join (parent, identifier)
154
155 # Extract the date: since the directory name is of the
156 # format build-<year><month><day>-<ordinal> we can easily
157 # pull it out.
158 # TODO: Better to stat a file?
159 (_, date, revision) = identifier.split ("-")
160 print(date)
161
162 year = int (date[0:4])
163 month = int (date[4:6])
164 day = int (date[6:8])
165
166 self.date = datetime.date (year, month, day)
167
168 self.conf = None
169
170 # By default builds are STATE_FAILED unless we find a "complete" file
171 # in which case they are STATE_COMPLETE
172 self.state = BuildResult.STATE_FAILED
173 for file in os.listdir (self.path):
174 if (file.startswith (".conf")):
175 conffile = os.path.join (self.path, file)
176 self.conf = BuildConfiguration.load_from_file (conffile)
177 elif (file.startswith ("complete")):
178 self.state = BuildResult.STATE_COMPLETE
179 else:
180 self.add_file (file)
181
182 def add_file (self, file):
183 # Just add the file for now. Don't care about the type.
184 self.files += [(file, None)]
185
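As noted in BuildResult.__init__ above, the build date is recovered purely from the directory name. A short sketch with a hypothetical identifier:

    import datetime  # already imported at the top of this module

    # Hypothetical identifier; "build-<year><month><day>-<ordinal>" is the
    # layout BuildResult.__init__ relies on when it splits on "-".
    identifier = "build-20240115-3"
    (_, date, revision) = identifier.split("-")
    assert datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8])) == datetime.date(2024, 1, 15)
    assert revision == "3"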
186class BuildManagerModel (gtk.TreeStore):
187 """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
188 but it abstracts nicely what the columns mean and the setup of the columns
189 in the model. """
190
191 (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
192 (0, 1, 2, 3, 4, 5, 6)
193
194 def __init__ (self):
195 gtk.TreeStore.__init__ (self,
196 gobject.TYPE_STRING,
197 gobject.TYPE_STRING,
198 gobject.TYPE_STRING,
199 gobject.TYPE_STRING,
200 gobject.TYPE_OBJECT,
201 gobject.TYPE_INT64,
202 gobject.TYPE_INT)
203
204class BuildManager (gobject.GObject):
205 """ This class manages the historic builds that have been found in the
206 "results" directory but is also used for starting a new build."""
207
208 __gsignals__ = {
209 'population-finished' : (gobject.SIGNAL_RUN_LAST,
210 gobject.TYPE_NONE,
211 ()),
212 'populate-error' : (gobject.SIGNAL_RUN_LAST,
213 gobject.TYPE_NONE,
214 ())
215 }
216
217 def update_build_result (self, result, iter):
218 # Convert the date into something we can sort by.
219 date = long (time.mktime (result.date.timetuple()))
220
221 # Add a top level entry for the build
222
223 self.model.set (iter,
224 BuildManagerModel.COL_IDENT, result.identifier,
225 BuildManagerModel.COL_DESC, result.conf.image,
226 BuildManagerModel.COL_MACHINE, result.conf.machine,
227 BuildManagerModel.COL_DISTRO, result.conf.distro,
228 BuildManagerModel.COL_BUILD_RESULT, result,
229 BuildManagerModel.COL_DATE, date,
230 BuildManagerModel.COL_STATE, result.state)
231
232 # And then we use the files in the directory as the children for the
233 # top level iter.
234 for file in result.files:
235 self.model.append (iter, (None, file[0], None, None, None, date, -1))
236
237 # This function is called as an idle by the BuildManagerPopulaterThread
238 def add_build_result (self, result):
239 gtk.gdk.threads_enter()
240 self.known_builds += [result]
241
242 self.update_build_result (result, self.model.append (None))
243
244 gtk.gdk.threads_leave()
245
246 def notify_build_finished (self):
247        # This is a bit of a hack. If we have a build running then we will
248        # have a row in the model in STATE_ONGOING. Find it and mark it as
249        # if it were a proper historic build (well, it is completed now...).
250
251 # We need to use the iters here rather than the Python iterator
252 # interface to the model since we need to pass it into
253 # update_build_result
254
255 iter = self.model.get_iter_first()
256
257 while (iter):
258 (ident, state) = self.model.get(iter,
259 BuildManagerModel.COL_IDENT,
260 BuildManagerModel.COL_STATE)
261
262 if state == BuildResult.STATE_ONGOING:
263 result = BuildResult (self.results_directory, ident)
264 self.update_build_result (result, iter)
265 iter = self.model.iter_next(iter)
266
267 def notify_build_succeeded (self):
268        # Write the "complete" file so that the BuildResult object we later
269        # create and put into the model is marked as STATE_COMPLETE.
270
271 complete_file_path = os.path.join (self.cur_build_directory, "complete")
272 f = file (complete_file_path, "w")
273 f.close()
274 self.notify_build_finished()
275
276 def notify_build_failed (self):
277        # Without a "complete" file this will mark the build as failed:
278 self.notify_build_finished()
279
280 # This function is called as an idle
281 def emit_population_finished_signal (self):
282 gtk.gdk.threads_enter()
283 self.emit ("population-finished")
284 gtk.gdk.threads_leave()
285
286 class BuildManagerPopulaterThread (threading.Thread):
287 def __init__ (self, manager, directory):
288 threading.Thread.__init__ (self)
289 self.manager = manager
290 self.directory = directory
291
292 def run (self):
293 # For each of the "build-<...>" directories ..
294
295 if os.path.exists (self.directory):
296 for directory in os.listdir (self.directory):
297
298 if not directory.startswith ("build-"):
299 continue
300
301 build_result = BuildResult (self.directory, directory)
302 self.manager.add_build_result (build_result)
303
304 gobject.idle_add (BuildManager.emit_population_finished_signal,
305 self.manager)
306
307 def __init__ (self, server, results_directory):
308 gobject.GObject.__init__ (self)
309
310 # The builds that we've found from walking the result directory
311 self.known_builds = []
312
313 # Save out the bitbake server, we need this for issuing commands to
314 # the cooker:
315 self.server = server
316
317 # The TreeStore that we use
318 self.model = BuildManagerModel ()
319
320 # The results directory is where we create (and look for) the
321 # build-<xyz>-<n> directories. We need to populate ourselves from
322        # that directory.
323 self.results_directory = results_directory
324 self.populate_from_directory (self.results_directory)
325
326 def populate_from_directory (self, directory):
327 thread = BuildManager.BuildManagerPopulaterThread (self, directory)
328 thread.start()
329
330 # Come up with the name for the next build ident by combining "build-"
331    # with the date formatted as yyyymmdd and then an ordinal. We use an
332    # optimistic algorithm, incrementing the ordinal whenever we find the
333    # candidate name already exists.
334 def get_next_build_ident (self):
335 today = datetime.date.today ()
336        datestr = today.strftime ("%Y%m%d")
337
338 revision = 0
339 test_name = "build-%s-%d" % (datestr, revision)
340 test_path = os.path.join (self.results_directory, test_name)
341
342 while (os.path.exists (test_path)):
343 revision += 1
344 test_name = "build-%s-%d" % (datestr, revision)
345 test_path = os.path.join (self.results_directory, test_name)
346
347 return test_name
348
349    # Take a BuildConfiguration and then try to build it based on the
350    # parameters of that configuration.
351 def do_build (self, conf):
352 server = self.server
353
354 # Work out the build directory. Note we actually create the
355 # directories here since we need to write the ".conf" file. Otherwise
356 # we could have relied on bitbake's builder thread to actually make
357 # the directories as it proceeds with the build.
358 ident = self.get_next_build_ident ()
359 build_directory = os.path.join (self.results_directory,
360 ident)
361 self.cur_build_directory = build_directory
362 os.makedirs (build_directory)
363
364 conffile = os.path.join (build_directory, ".conf")
365 conf.write_to_file (conffile)
366
367        # Add a row to the model representing this ongoing build. It's
368        # something of a placeholder entry: if this build completes or fails
369        # it gets updated with real data, like the historic builds.
370 date = long (time.time())
371 self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
372 None, date, BuildResult.STATE_ONGOING))
373 try:
374 server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
375 server.runCommand(["setVariable", "MACHINE", conf.machine])
376 server.runCommand(["setVariable", "DISTRO", conf.distro])
377 server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
378 server.runCommand(["setVariable", "BBFILES", \
379 """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
380 server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
381 server.runCommand(["setVariable", "IPK_FEED_URIS", \
382 " ".join(conf.get_repos())])
383 server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
384 build_directory])
385 server.runCommand(["buildTargets", [conf.image], "rootfs"])
386
387 except Exception as e:
388 print(e)
389
390class BuildManagerTreeView (gtk.TreeView):
391 """ The tree view for the build manager. This shows the historic builds
392 and so forth. """
393
394 # We use this function to control what goes in the cell since we store
395 # the date in the model as seconds since the epoch (for sorting) and so we
396 # need to make it human readable.
397 def date_format_custom_cell_data_func (self, col, cell, model, iter):
398 date = model.get (iter, BuildManagerModel.COL_DATE)[0]
399 datestr = time.strftime("%A %d %B %Y", time.localtime(date))
400 cell.set_property ("text", datestr)
401
402 # This format function controls what goes in the cell. We use this to map
403 # the integer state to a string and also to colourise the text
404 def state_format_custom_cell_data_fun (self, col, cell, model, iter):
405 state = model.get (iter, BuildManagerModel.COL_STATE)[0]
406
407 if (state == BuildResult.STATE_ONGOING):
408 cell.set_property ("text", "Active")
409 cell.set_property ("foreground", "#000000")
410 elif (state == BuildResult.STATE_FAILED):
411 cell.set_property ("text", "Failed")
412 cell.set_property ("foreground", "#ff0000")
413 elif (state == BuildResult.STATE_COMPLETE):
414 cell.set_property ("text", "Complete")
415 cell.set_property ("foreground", "#00ff00")
416 else:
417 cell.set_property ("text", "")
418
419 def __init__ (self):
420 gtk.TreeView.__init__(self)
421
422        # Misc description column
423 renderer = gtk.CellRendererText ()
424 col = gtk.TreeViewColumn (None, renderer,
425 text=BuildManagerModel.COL_DESC)
426 self.append_column (col)
427
428 # Machine
429 renderer = gtk.CellRendererText ()
430 col = gtk.TreeViewColumn ("Machine", renderer,
431 text=BuildManagerModel.COL_MACHINE)
432 self.append_column (col)
433
434 # distro
435 renderer = gtk.CellRendererText ()
436 col = gtk.TreeViewColumn ("Distribution", renderer,
437 text=BuildManagerModel.COL_DISTRO)
438 self.append_column (col)
439
440        # date (using a custom function to format the cell contents: it
441        # maps epoch -> human readable string)
442 renderer = gtk.CellRendererText ()
443 col = gtk.TreeViewColumn ("Date", renderer,
444 text=BuildManagerModel.COL_DATE)
445 self.append_column (col)
446 col.set_cell_data_func (renderer,
447 self.date_format_custom_cell_data_func)
448
449 # For status.
450 renderer = gtk.CellRendererText ()
451 col = gtk.TreeViewColumn ("Status", renderer,
452 text = BuildManagerModel.COL_STATE)
453 self.append_column (col)
454 col.set_cell_data_func (renderer,
455 self.state_format_custom_cell_data_fun)
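For context, a minimal sketch of how the three classes above could be wired together. This is an illustrative example only, assuming a pygtk main loop, a bitbake server handle ("server") obtained elsewhere, and an existing results directory.

import gtk

def open_build_manager(server, results_directory):
    # the model is populated on a BuildManagerPopulaterThread, so the
    # gdk lock must be initialised before entering the main loop
    gtk.gdk.threads_init()

    manager = BuildManager(server, results_directory)
    view = BuildManagerTreeView()
    view.set_model(manager.model)

    window = gtk.Window()
    scroll = gtk.ScrolledWindow()
    scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    scroll.add(view)
    window.add(scroll)
    window.set_default_size(600, 400)
    window.connect("destroy", gtk.main_quit)
    window.show_all()
    gtk.main()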
diff --git a/bitbake/lib/bb/ui/crumbs/hig/__init__.py b/bitbake/lib/bb/ui/crumbs/hig/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/__init__.py
diff --git a/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py
new file mode 100644
index 0000000000..e0b3553c2f
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py
@@ -0,0 +1,341 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import hashlib
25from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton
26from bb.ui.crumbs.progressbar import HobProgressBar
27from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
28from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog
31
32"""
33The following are convenience classes for implementing GNOME HIG compliant
34BitBake GUIs.
35In summary: spacing = 12px, border-width = 6px
36"""
37
38class AdvancedSettingsDialog (CrumbsDialog, SettingsUIHelper):
39
40 def details_cb(self, button, parent, protocol):
41 dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
42 user = self.configuration.proxies[protocol][1],
43 passwd = self.configuration.proxies[protocol][2],
44 parent = parent,
45 flags = gtk.DIALOG_MODAL
46 | gtk.DIALOG_DESTROY_WITH_PARENT
47 | gtk.DIALOG_NO_SEPARATOR)
48 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
49 response = dialog.run()
50 if response == gtk.RESPONSE_OK:
51 self.configuration.proxies[protocol][1] = dialog.user
52 self.configuration.proxies[protocol][2] = dialog.passwd
53 self.refresh_proxy_components()
54 dialog.destroy()
55
56 def set_save_button(self, button):
57 self.save_button = button
58
59 def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
60 combo_item = self.rootfs_combo.get_active_text()
61 modified = False
62 for child in check_hbox.get_children():
63 if isinstance(child, gtk.CheckButton):
64 check_hbox.remove(child)
65 modified = True
66 for format in all_package_format:
67 if format != combo_item:
68 check_button = gtk.CheckButton(format)
69 check_hbox.pack_start(check_button, expand=False, fill=False)
70 modified = True
71 if modified:
72 check_hbox.remove(self.pkgfmt_info)
73 check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
74 check_hbox.show_all()
75
76 def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
77 pkgfmt_vbox = gtk.VBox(False, 6)
78
79 label = self.gen_label_widget("Root file system package format")
80 pkgfmt_vbox.pack_start(label, expand=False, fill=False)
81
82 rootfs_format = ""
83 if curr_package_format:
84 rootfs_format = curr_package_format.split()[0]
85
86 rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
87 pkgfmt_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)
88
89 label = self.gen_label_widget("Additional package formats")
90 pkgfmt_vbox.pack_start(label, expand=False, fill=False)
91
92 check_hbox = gtk.HBox(False, 12)
93 pkgfmt_vbox.pack_start(check_hbox, expand=False, fill=False)
94 for format in all_package_format:
95 if format != rootfs_format:
96 check_button = gtk.CheckButton(format)
97 is_active = (format in curr_package_format.split())
98 check_button.set_active(is_active)
99 check_hbox.pack_start(check_button, expand=False, fill=False)
100
101 self.pkgfmt_info = HobInfoButton(tooltip_extra, self)
102 check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
103
104 rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)
105
106 pkgfmt_vbox.show_all()
107
108 return pkgfmt_vbox, rootfs_combo, check_hbox
109
110 def __init__(self, title, configuration, all_image_types,
111 all_package_formats, all_distros, all_sdk_machines,
112 max_threads, parent, flags, buttons=None):
113 super(AdvancedSettingsDialog, self).__init__(title, parent, flags, buttons)
114
115 # class members from other objects
116 # bitbake settings from Builder.Configuration
117 self.configuration = configuration
118 self.image_types = all_image_types
119 self.all_package_formats = all_package_formats
120 self.all_distros = all_distros[:]
121 self.all_sdk_machines = all_sdk_machines
122 self.max_threads = max_threads
123
124 # class members for internal use
125 self.distro_combo = None
126 self.dldir_text = None
127 self.sstatedir_text = None
128 self.sstatemirror_text = None
129 self.bb_spinner = None
130 self.pmake_spinner = None
131 self.rootfs_size_spinner = None
132 self.extra_size_spinner = None
133 self.gplv3_checkbox = None
134 self.sdk_checkbox = None
135 self.image_types_checkbuttons = {}
136
137 self.md5 = self.config_md5()
138 self.settings_changed = False
139
140 # create visual elements on the dialog
141 self.save_button = None
142 self.create_visual_elements()
143 self.connect("response", self.response_cb)
144
145 def _get_sorted_value(self, var):
146 return " ".join(sorted(str(var).split())) + "\n"
147
148 def config_md5(self):
149 data = ""
150 data += ("PACKAGE_CLASSES: " + self.configuration.curr_package_format + '\n')
151 data += ("DISTRO: " + self._get_sorted_value(self.configuration.curr_distro))
152 data += ("IMAGE_ROOTFS_SIZE: " + self._get_sorted_value(self.configuration.image_rootfs_size))
153 data += ("IMAGE_EXTRA_SIZE: " + self._get_sorted_value(self.configuration.image_extra_size))
154 data += ("INCOMPATIBLE_LICENSE: " + self._get_sorted_value(self.configuration.incompat_license))
155 data += ("SDK_MACHINE: " + self._get_sorted_value(self.configuration.curr_sdk_machine))
156 data += ("TOOLCHAIN_BUILD: " + self._get_sorted_value(self.configuration.toolchain_build))
157 data += ("IMAGE_FSTYPES: " + self._get_sorted_value(self.configuration.image_fstypes))
158 return hashlib.md5(data).hexdigest()
159
160 def create_visual_elements(self):
161 self.nb = gtk.Notebook()
162 self.nb.set_show_tabs(True)
163 self.nb.append_page(self.create_image_types_page(), gtk.Label("Image types"))
164 self.nb.append_page(self.create_output_page(), gtk.Label("Output"))
165 self.nb.set_current_page(0)
166 self.vbox.pack_start(self.nb, expand=True, fill=True)
167 self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)
168
169 self.show_all()
170
171 def get_num_checked_image_types(self):
172 total = 0
173 for b in self.image_types_checkbuttons.values():
174 if b.get_active():
175 total = total + 1
176 return total
177
178 def set_save_button_state(self):
179 if self.save_button:
180 self.save_button.set_sensitive(self.get_num_checked_image_types() > 0)
181
182 def image_type_checkbutton_clicked_cb(self, button):
183 self.set_save_button_state()
184 if self.get_num_checked_image_types() == 0:
185 # Show an error dialog
186 lbl = "<b>Select an image type</b>"
187 msg = "You need to select at least one image type."
188 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
189 button = dialog.add_button("OK", gtk.RESPONSE_OK)
190 HobButton.style_button(button)
191 response = dialog.run()
192 dialog.destroy()
193
194 def create_image_types_page(self):
195 main_vbox = gtk.VBox(False, 16)
196 main_vbox.set_border_width(6)
197
198 advanced_vbox = gtk.VBox(False, 6)
199 advanced_vbox.set_border_width(6)
200
201 distro_vbox = gtk.VBox(False, 6)
202 label = self.gen_label_widget("Distro:")
203 tooltip = "Selects the Yocto Project distribution you want"
204 try:
205 i = self.all_distros.index( "defaultsetup" )
206 except ValueError:
207 i = -1
208 if i != -1:
209 self.all_distros[ i ] = "Default"
210 if self.configuration.curr_distro == "defaultsetup":
211 self.configuration.curr_distro = "Default"
212 distro_widget, self.distro_combo = self.gen_combo_widget(self.configuration.curr_distro, self.all_distros,"<b>Distro</b>" + "*" + tooltip)
213 distro_vbox.pack_start(label, expand=False, fill=False)
214 distro_vbox.pack_start(distro_widget, expand=False, fill=False)
215 main_vbox.pack_start(distro_vbox, expand=False, fill=False)
216
217
218 rows = (len(self.image_types)+1)/3
219 table = gtk.Table(rows + 1, 10, True)
220 advanced_vbox.pack_start(table, expand=False, fill=False)
221
222 tooltip = "Image file system types you want."
223 info = HobInfoButton("<b>Image types</b>" + "*" + tooltip, self)
224 label = self.gen_label_widget("Image types:")
225 align = gtk.Alignment(0, 0.5, 0, 0)
226 table.attach(align, 0, 4, 0, 1)
227 align.add(label)
228 table.attach(info, 4, 5, 0, 1)
229
230 i = 1
231 j = 1
232 for image_type in sorted(self.image_types):
233 self.image_types_checkbuttons[image_type] = gtk.CheckButton(image_type)
234 self.image_types_checkbuttons[image_type].connect("toggled", self.image_type_checkbutton_clicked_cb)
235 article = ""
236 if image_type.startswith(("a", "e", "i", "o", "u")):
237 article = "n"
238 if image_type == "live":
239 self.image_types_checkbuttons[image_type].set_tooltip_text("Build iso and hddimg images")
240 else:
241 self.image_types_checkbuttons[image_type].set_tooltip_text("Build a%s %s image" % (article, image_type))
242 table.attach(self.image_types_checkbuttons[image_type], j - 1, j + 3, i, i + 1)
243 if image_type in self.configuration.image_fstypes.split():
244 self.image_types_checkbuttons[image_type].set_active(True)
245 i += 1
246 if i > rows:
247 i = 1
248 j = j + 4
249
250 main_vbox.pack_start(advanced_vbox, expand=False, fill=False)
251 self.set_save_button_state()
252
253 return main_vbox
254
255 def create_output_page(self):
256 advanced_vbox = gtk.VBox(False, 6)
257 advanced_vbox.set_border_width(6)
258
259 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Package format</span>'), expand=False, fill=False)
260 sub_vbox = gtk.VBox(False, 6)
261 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
262 tooltip_combo = "Selects the package format used to generate rootfs."
263 tooltip_extra = "Selects extra package formats to build"
264 pkgfmt_widget, self.rootfs_combo, self.check_hbox = self.gen_pkgfmt_widget(self.configuration.curr_package_format, self.all_package_formats,"<b>Root file system package format</b>" + "*" + tooltip_combo,"<b>Additional package formats</b>" + "*" + tooltip_extra)
265 sub_vbox.pack_start(pkgfmt_widget, expand=False, fill=False)
266
267 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Image size</span>'), expand=False, fill=False)
268 sub_vbox = gtk.VBox(False, 6)
269 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
270 label = self.gen_label_widget("Image basic size (in MB)")
271 tooltip = "Defines the size for the generated image. The OpenEmbedded build system determines the final size for the generated image using an algorithm that takes into account the initial disk space used for the generated image, the Image basic size value, and the Additional free space value.\n\nFor more information, check the <a href=\"http://www.yoctoproject.org/docs/current/poky-ref-manual/poky-ref-manual.html#var-IMAGE_ROOTFS_SIZE\">Yocto Project Reference Manual</a>."
272 rootfs_size_widget, self.rootfs_size_spinner = self.gen_spinner_widget(int(self.configuration.image_rootfs_size*1.0/1024), 0, 65536,"<b>Image basic size</b>" + "*" + tooltip)
273 sub_vbox.pack_start(label, expand=False, fill=False)
274 sub_vbox.pack_start(rootfs_size_widget, expand=False, fill=False)
275
276 sub_vbox = gtk.VBox(False, 6)
277 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
278 label = self.gen_label_widget("Additional free space (in MB)")
279 tooltip = "Sets extra free disk space to be added to the generated image. Use this variable when you want to ensure that a specific amount of free disk space is available on a device after an image is installed and running."
280 extra_size_widget, self.extra_size_spinner = self.gen_spinner_widget(int(self.configuration.image_extra_size*1.0/1024), 0, 65536,"<b>Additional free space</b>" + "*" + tooltip)
281 sub_vbox.pack_start(label, expand=False, fill=False)
282 sub_vbox.pack_start(extra_size_widget, expand=False, fill=False)
283
284 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Licensing</span>'), expand=False, fill=False)
285 self.gplv3_checkbox = gtk.CheckButton("Exclude GPLv3 packages")
286 self.gplv3_checkbox.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
287 if "GPLv3" in self.configuration.incompat_license.split():
288 self.gplv3_checkbox.set_active(True)
289 else:
290 self.gplv3_checkbox.set_active(False)
291 advanced_vbox.pack_start(self.gplv3_checkbox, expand=False, fill=False)
292
293 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">SDK</span>'), expand=False, fill=False)
294 sub_hbox = gtk.HBox(False, 6)
295 advanced_vbox.pack_start(sub_hbox, expand=False, fill=False)
296 self.sdk_checkbox = gtk.CheckButton("Populate SDK")
297 tooltip = "Check this box to generate an SDK tarball that consists of the cross-toolchain and a sysroot that contains development packages for your image."
298 self.sdk_checkbox.set_tooltip_text(tooltip)
299 self.sdk_checkbox.set_active(self.configuration.toolchain_build)
300 sub_hbox.pack_start(self.sdk_checkbox, expand=False, fill=False)
301
302 tooltip = "Select the host platform for which you want to run the toolchain contained in the SDK tarball."
303 sdk_machine_widget, self.sdk_machine_combo = self.gen_combo_widget(self.configuration.curr_sdk_machine, self.all_sdk_machines,"<b>Populate SDK</b>" + "*" + tooltip)
304 sub_hbox.pack_start(sdk_machine_widget, expand=False, fill=False)
305
306 return advanced_vbox
307
308 def response_cb(self, dialog, response_id):
309 package_format = []
310 package_format.append(self.rootfs_combo.get_active_text())
311 for child in self.check_hbox:
312 if isinstance(child, gtk.CheckButton) and child.get_active():
313 package_format.append(child.get_label())
314 self.configuration.curr_package_format = " ".join(package_format)
315
316 distro = self.distro_combo.get_active_text()
317 if distro == "Default":
318 distro = "defaultsetup"
319 self.configuration.curr_distro = distro
320 self.configuration.image_rootfs_size = self.rootfs_size_spinner.get_value_as_int() * 1024
321 self.configuration.image_extra_size = self.extra_size_spinner.get_value_as_int() * 1024
322
323 self.configuration.image_fstypes = ""
324 for image_type in self.image_types:
325 if self.image_types_checkbuttons[image_type].get_active():
326 self.configuration.image_fstypes += (" " + image_type)
327        self.configuration.image_fstypes = self.configuration.image_fstypes.strip()
328
329 if self.gplv3_checkbox.get_active():
330 if "GPLv3" not in self.configuration.incompat_license.split():
331 self.configuration.incompat_license += " GPLv3"
332 else:
333 if "GPLv3" in self.configuration.incompat_license.split():
334                licenses = [lic for lic in self.configuration.incompat_license.split() if lic != "GPLv3"]
335                self.configuration.incompat_license = " ".join(licenses)
336 self.configuration.incompat_license = self.configuration.incompat_license.strip()
337
338 self.configuration.toolchain_build = self.sdk_checkbox.get_active()
339 self.configuration.curr_sdk_machine = self.sdk_machine_combo.get_active_text()
340 md5 = self.config_md5()
341 self.settings_changed = (self.md5 != md5)
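The config_md5()/response_cb() pair above implements a simple change-detection pattern: hash a canonical snapshot of the settings when the dialog is created, hash again when it is dismissed, and compare the digests. A standalone sketch of the same idea, with illustrative variable names:

import hashlib

def settings_digest(settings):
    # sort both the keys and each value's word list so that ordering
    # differences alone are not reported as changes
    data = ""
    for key in sorted(settings):
        data += "%s: %s\n" % (key, " ".join(sorted(str(settings[key]).split())))
    return hashlib.md5(data).hexdigest()

before = settings_digest({"DISTRO": "defaultsetup", "IMAGE_FSTYPES": "ext3 jffs2"})
after = settings_digest({"DISTRO": "defaultsetup", "IMAGE_FSTYPES": "jffs2 ext3"})
assert before == after    # reordering a value is not a settings change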
diff --git a/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
new file mode 100644
index 0000000000..c679f9a070
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
@@ -0,0 +1,44 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24
25"""
26The following are convenience classes for implementing GNOME HIG compliant
27BitBake GUIs.
28In summary: spacing = 12px, border-width = 6px
29"""
30
31class CrumbsDialog(gtk.Dialog):
32 """
33 A GNOME HIG compliant dialog widget.
34 Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
35 """
36 def __init__(self, title="", parent=None, flags=0, buttons=None):
37 super(CrumbsDialog, self).__init__(title, parent, flags, buttons)
38
39 self.set_property("has-separator", False) # note: deprecated in 2.22
40
41 self.set_border_width(6)
42 self.vbox.set_property("spacing", 12)
43 self.action_area.set_property("spacing", 12)
44 self.action_area.set_property("border-width", 6)
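A minimal, hypothetical usage sketch: as the docstring suggests, buttons are added with gtk.Dialog.add_button rather than passed at construction time.

dialog = CrumbsDialog(title="Example", parent=None, flags=gtk.DIALOG_MODAL)
dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
response = dialog.run()    # returns the response id of the clicked button
dialog.destroy()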
diff --git a/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
new file mode 100644
index 0000000000..3b998e4637
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
@@ -0,0 +1,70 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import glib
24import gtk
25from bb.ui.crumbs.hobwidget import HobIconChecker
26from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
27
28"""
29The following are convenience classes for implementing GNOME HIG compliant
30BitBake GUIs.
31In summary: spacing = 12px, border-width = 6px
32"""
33
34class CrumbsMessageDialog(gtk.MessageDialog):
35 """
36 A GNOME HIG compliant dialog widget.
37 Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
38 """
39 def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
40 super(CrumbsMessageDialog, self).__init__(None,
41 gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
42 dialog_type,
43 gtk.BUTTONS_NONE,
44 None)
45
46 self.set_skip_taskbar_hint(False)
47
48 self.set_markup(label)
49
50 if 0 <= len(msg) < 300:
51 self.format_secondary_markup(msg)
52 else:
53 vbox = self.get_message_area()
54 vbox.set_border_width(1)
55 vbox.set_property("spacing", 12)
56 self.textWindow = gtk.ScrolledWindow()
57 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
58 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
59 self.msgView = gtk.TextView()
60 self.msgView.set_editable(False)
61 self.msgView.set_wrap_mode(gtk.WRAP_WORD)
62 self.msgView.set_cursor_visible(False)
63 self.msgView.set_size_request(300, 300)
64 self.buf = gtk.TextBuffer()
65 self.buf.set_text(msg)
66 self.msgView.set_buffer(self.buf)
67 self.textWindow.add(self.msgView)
68 self.msgView.show()
69 vbox.add(self.textWindow)
70 self.textWindow.show()
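A minimal usage sketch, mirroring how CrumbsMessageDialog is called elsewhere in this series (primary markup as label, secondary text as msg); the strings here are illustrative only.

lbl = "<b>Select an image type</b>"
msg = "You need to select at least one image type."
dialog = CrumbsMessageDialog(None, lbl, gtk.MESSAGE_WARNING, msg)
dialog.add_button("OK", gtk.RESPONSE_OK)
dialog.run()
dialog.destroy()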
diff --git a/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
new file mode 100644
index 0000000000..a13fff906a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
@@ -0,0 +1,219 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import glob
24import gtk
25import gobject
26import os
27import re
28import shlex
29import subprocess
30import tempfile
31from bb.ui.crumbs.hobwidget import hic, HobButton
32from bb.ui.crumbs.progressbar import HobProgressBar
33import bb.ui.crumbs.utils
34import bb.process
35from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
36from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
37
38"""
39The following are convenience classes for implementing GNOME HIG compliant
40BitBake GUIs.
41In summary: spacing = 12px, border-width = 6px
42"""
43
44class DeployImageDialog (CrumbsDialog):
45
46 __dummy_usb__ = "--select a usb drive--"
47
48 def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
49 super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
50
51 self.image_path = image_path
52 self.standalone = standalone
53
54 self.create_visual_elements()
55 self.connect("response", self.response_cb)
56
57 def create_visual_elements(self):
58 self.set_size_request(600, 400)
59 label = gtk.Label()
60 label.set_alignment(0.0, 0.5)
61        markup = "<span font_desc='12'>The image to be written to the USB drive:</span>"
62 label.set_markup(markup)
63 self.vbox.pack_start(label, expand=False, fill=False, padding=2)
64
65 table = gtk.Table(2, 10, False)
66 table.set_col_spacings(5)
67 table.set_row_spacings(5)
68 self.vbox.pack_start(table, expand=True, fill=True)
69
70 scroll = gtk.ScrolledWindow()
71 scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
72 scroll.set_shadow_type(gtk.SHADOW_IN)
73 tv = gtk.TextView()
74 tv.set_editable(False)
75 tv.set_wrap_mode(gtk.WRAP_WORD)
76 tv.set_cursor_visible(False)
77 self.buf = gtk.TextBuffer()
78 self.buf.set_text(self.image_path)
79 tv.set_buffer(self.buf)
80 scroll.add(tv)
81 table.attach(scroll, 0, 10, 0, 1)
82
83        # There are two ways to use DeployImageDialog.
84        # One is when it is invoked by Hob after the 'Deploy Image' button is clicked.
85        # The other is when it is invoked by a standalone script.
86        # The following block of code handles the latter case. It adds a 'Select Image'
87        # button and emits a signal when the button is clicked.
88 if self.standalone:
89 gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
90 gobject.TYPE_NONE, ())
91 icon = gtk.Image()
92 pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
93 icon.set_from_pixbuf(pix_buffer)
94 button = gtk.Button("Select Image")
95 button.set_image(icon)
96 #button.set_size_request(140, 50)
97 table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
98 button.connect("clicked", self.select_image_button_clicked_cb)
99
100 separator = gtk.HSeparator()
101 self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
102
103 self.usb_desc = gtk.Label()
104 self.usb_desc.set_alignment(0.0, 0.5)
105 markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
106 self.usb_desc.set_markup(markup)
107
108 self.usb_combo = gtk.combo_box_new_text()
109 self.usb_combo.connect("changed", self.usb_combo_changed_cb)
110 model = self.usb_combo.get_model()
111 model.clear()
112 self.usb_combo.append_text(self.__dummy_usb__)
113 for usb in self.find_all_usb_devices():
114 self.usb_combo.append_text("/dev/" + usb)
115 self.usb_combo.set_active(0)
116 self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
117 self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
118
119 self.progress_bar = HobProgressBar()
120 self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
121 separator = gtk.HSeparator()
122 self.vbox.pack_start(separator, expand=False, fill=True, padding=10)
123
124 self.vbox.show_all()
125 self.progress_bar.hide()
126
127 def set_image_text_buffer(self, image_path):
128 self.buf.set_text(image_path)
129
130 def set_image_path(self, image_path):
131 self.image_path = image_path
132
133 def popen_read(self, cmd):
134 tmpout, errors = bb.process.run("%s" % cmd)
135 return tmpout.strip()
136
137 def find_all_usb_devices(self):
138 usb_devs = [ os.readlink(u)
139 for u in glob.glob('/dev/disk/by-id/usb*')
140 if not re.search(r'part\d+', u) ]
141 return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
142
143 def get_usb_info(self, dev):
144 return "%s %s" % \
145 (self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
146 self.popen_read('cat /sys/class/block/%s/device/model' % dev))
147
148 def select_image_button_clicked_cb(self, button):
149 self.emit('select_image_clicked')
150
151 def usb_combo_changed_cb(self, usb_combo):
152 combo_item = self.usb_combo.get_active_text()
153 if not combo_item or combo_item == self.__dummy_usb__:
154 markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
155 self.usb_desc.set_markup(markup)
156 else:
157            markup = "<span font_desc='12'>" + self.get_usb_info(combo_item[len("/dev/"):]) + "</span>"
158 self.usb_desc.set_markup(markup)
159
160 def response_cb(self, dialog, response_id):
161 if response_id == gtk.RESPONSE_YES:
162 lbl = ''
163 msg = ''
164 combo_item = self.usb_combo.get_active_text()
165 if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
166 cmdline = bb.ui.crumbs.utils.which_terminal()
167 if cmdline:
168 tmpfile = tempfile.NamedTemporaryFile()
169 cmdline += "\"sudo dd if=" + self.image_path + \
170 " of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
171 subprocess.call(shlex.split(cmdline))
172
173 if int(tmpfile.readline().strip()) == 0:
174 lbl = "<b>Deploy image successfully.</b>"
175 else:
176 lbl = "<b>Failed to deploy image.</b>"
177 msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
178 tmpfile.close()
179 else:
180 if not self.image_path:
181 lbl = "<b>No selection made.</b>"
182 msg = "You have not selected an image to deploy."
183 else:
184 lbl = "<b>No selection made.</b>"
185 msg = "You have not selected a USB device."
186 if len(lbl):
187 crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
188 button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
189 HobButton.style_button(button)
190 crumbs_dialog.run()
191 crumbs_dialog.destroy()
192
193 def update_progress_bar(self, title, fraction, status=None):
194 self.progress_bar.update(fraction)
195 self.progress_bar.set_title(title)
196 self.progress_bar.set_rcstyle(status)
197
198 def write_file(self, ifile, ofile):
199 self.progress_bar.reset()
200 self.progress_bar.show()
201
202 f_from = os.open(ifile, os.O_RDONLY)
203 f_to = os.open(ofile, os.O_WRONLY)
204
205 total_size = os.stat(ifile).st_size
206 written_size = 0
207
208 while True:
209 buf = os.read(f_from, 1024*1024)
210 if not buf:
211 break
212 os.write(f_to, buf)
213 written_size += 1024*1024
214 self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)
215
216 self.update_progress_bar("Writing completed:", 1.0)
217 os.close(f_from)
218 os.close(f_to)
219 self.progress_bar.hide()
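write_file() above copies the image in 1 MB chunks and feeds the running total to the progress bar. The same idea as a standalone helper, with an illustrative progress callback in place of the widget:

import os

def copy_with_progress(ifile, ofile, progress_cb, chunk=1024 * 1024):
    total = os.stat(ifile).st_size
    done = 0
    src = os.open(ifile, os.O_RDONLY)
    dst = os.open(ofile, os.O_WRONLY)
    try:
        while True:
            buf = os.read(src, chunk)
            if not buf:
                break
            os.write(dst, buf)
            done += len(buf)    # count what was actually read
            progress_cb(min(done * 1.0 / total, 1.0))
    finally:
        os.close(src)
        os.close(dst)

# e.g. copy_with_progress("core-image.hddimg", "/dev/sdb",
#                         lambda f: dialog.update_progress_bar("Writing to USB:", f))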
diff --git a/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py b/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
new file mode 100644
index 0000000000..21216adc97
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
@@ -0,0 +1,172 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import os
26from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
27from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
28from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
29
30"""
31The following are convenience classes for implementing GNOME HIG compliant
32BitBake GUIs.
33In summary: spacing = 12px, border-width = 6px
34"""
35
36class ImageSelectionDialog (CrumbsDialog):
37
38 __columns__ = [{
39 'col_name' : 'Image name',
40 'col_id' : 0,
41 'col_style': 'text',
42 'col_min' : 400,
43 'col_max' : 400
44 }, {
45 'col_name' : 'Select',
46 'col_id' : 1,
47 'col_style': 'radio toggle',
48 'col_min' : 160,
49 'col_max' : 160
50 }]
51
52
53 def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
54 super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
55 self.connect("response", self.response_cb)
56
57 self.image_folder = image_folder
58 self.image_types = image_types
59 self.image_list = []
60 self.image_names = []
61 self.image_extension = image_extension
62
63 # create visual elements on the dialog
64 self.create_visual_elements()
65
66 self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
67 self.fill_image_store()
68
69 def create_visual_elements(self):
70 hbox = gtk.HBox(False, 6)
71
72 self.vbox.pack_start(hbox, expand=False, fill=False)
73
74 entry = gtk.Entry()
75 entry.set_text(self.image_folder)
76 table = gtk.Table(1, 10, True)
77 table.set_size_request(560, -1)
78 hbox.pack_start(table, expand=False, fill=False)
79 table.attach(entry, 0, 9, 0, 1)
80 image = gtk.Image()
81 image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
82 open_button = gtk.Button()
83 open_button.set_image(image)
84 open_button.connect("clicked", self.select_path_cb, self, entry)
85 table.attach(open_button, 9, 10, 0, 1)
86
87 self.image_table = HobViewTable(self.__columns__, "Images")
88 self.image_table.set_size_request(-1, 300)
89 self.image_table.connect("toggled", self.toggled_cb)
90 self.image_table.connect_group_selection(self.table_selected_cb)
91 self.image_table.connect("row-activated", self.row_actived_cb)
92 self.vbox.pack_start(self.image_table, expand=True, fill=True)
93
94 self.show_all()
95
96 def change_image_cb(self, model, path, columnid):
97 if not model:
98 return
99 iter = model.get_iter_first()
100 while iter:
101 rowpath = model.get_path(iter)
102 model[rowpath][columnid] = False
103 iter = model.iter_next(iter)
104
105 model[path][columnid] = True
106
107 def toggled_cb(self, table, cell, path, columnid, tree):
108 model = tree.get_model()
109 self.change_image_cb(model, path, columnid)
110
111 def table_selected_cb(self, selection):
112 model, paths = selection.get_selected_rows()
113 if paths:
114 self.change_image_cb(model, paths[0], 1)
115
116 def row_actived_cb(self, tab, model, path):
117 self.change_image_cb(model, path, 1)
118 self.emit('response', gtk.RESPONSE_YES)
119
120 def select_path_cb(self, action, parent, entry):
121 dialog = gtk.FileChooserDialog("", parent,
122 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
123 text = entry.get_text()
124 dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
125 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
126 HobAltButton.style_button(button)
127 button = dialog.add_button("Open", gtk.RESPONSE_YES)
128 HobButton.style_button(button)
129 response = dialog.run()
130 if response == gtk.RESPONSE_YES:
131 path = dialog.get_filename()
132 entry.set_text(path)
133 self.image_folder = path
134 self.fill_image_store()
135
136 dialog.destroy()
137
138 def fill_image_store(self):
139 self.image_list = []
140 self.image_store.clear()
141 imageset = set()
142 for root, dirs, files in os.walk(self.image_folder):
143 # ignore the sub directories
144 dirs[:] = []
145 for f in files:
146 for image_type in self.image_types:
147 if image_type in self.image_extension:
148 real_types = self.image_extension[image_type]
149 else:
150 real_types = [image_type]
151 for real_image_type in real_types:
152 if f.endswith('.' + real_image_type):
153 imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
154 self.image_list.append(f)
155
156 for image in imageset:
157 self.image_store.set(self.image_store.append(), 0, image, 1, False)
158
159 self.image_table.set_model(self.image_store)
160
161 def response_cb(self, dialog, response_id):
162 self.image_names = []
163 if response_id == gtk.RESPONSE_YES:
164 iter = self.image_store.get_iter_first()
165 while iter:
166 path = self.image_store.get_path(iter)
167 if self.image_store[path][1]:
168 for f in self.image_list:
169 if f.startswith(self.image_store[path][0] + '.'):
170 self.image_names.append(f)
171 break
172 iter = self.image_store.iter_next(iter)
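fill_image_store() above derives the selectable image names by scanning only the top level of the chosen folder and stripping the image-type and ".rootfs" suffixes. A standalone sketch of that filtering, with illustrative arguments:

import os

def collect_image_names(folder, image_types, image_extension=None):
    image_extension = image_extension or {}
    names = set()
    for f in os.listdir(folder):
        for image_type in image_types:
            # an image type may map to several real file extensions
            for ext in image_extension.get(image_type, [image_type]):
                if f.endswith("." + ext):
                    names.add(f.rsplit("." + ext)[0].rsplit(".rootfs")[0])
    return sorted(names)

# e.g. collect_image_names("tmp/deploy/images", ["ext3", "tar.bz2"])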
diff --git a/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py b/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
new file mode 100644
index 0000000000..52d57b6738
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
@@ -0,0 +1,298 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import os
26import tempfile
27from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
28from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30
31"""
32The following are convenience classes for implementing GNOME HIG compliant
33BitBake GUIs.
34In summary: spacing = 12px, border-width = 6px
35"""
36
37class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
38 """
39 A custom CellRenderer implementation which is activatable
40 so that we can handle user clicks
41 """
42 __gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 (gobject.TYPE_STRING,)), }
45
46 def __init__(self):
47 gtk.CellRendererPixbuf.__init__(self)
48 self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
49 self.set_property('follow-state', True)
50
51 """
52 Respond to a user click on a cell
53 """
54    def do_activate(self, event, widget, path, background_area, cell_area, flags):
55 self.emit('clicked', path)
56
57#
58# LayerSelectionDialog
59#
60class LayerSelectionDialog (CrumbsDialog):
61
62 TARGETS = [
63 ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
64 ("text/plain", 0, 1),
65 ("TEXT", 0, 2),
66 ("STRING", 0, 3),
67 ]
68
69 def gen_label_widget(self, content):
70 label = gtk.Label()
71 label.set_alignment(0, 0)
72 label.set_markup(content)
73 label.show()
74 return label
75
76 def layer_widget_toggled_cb(self, cell, path, layer_store):
77 name = layer_store[path][0]
78 toggle = not layer_store[path][1]
79 layer_store[path][1] = toggle
80
81 def layer_widget_add_clicked_cb(self, action, layer_store, parent):
82 dialog = gtk.FileChooserDialog("Add new layer", parent,
83 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
84 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
85 HobAltButton.style_button(button)
86 button = dialog.add_button("Open", gtk.RESPONSE_YES)
87 HobButton.style_button(button)
88 label = gtk.Label("Select the layer you wish to add")
89 label.show()
90 dialog.set_extra_widget(label)
91 response = dialog.run()
92 path = dialog.get_filename()
93 dialog.destroy()
94
95 lbl = "<b>Error</b>"
96 msg = "Unable to load layer <i>%s</i> because " % path
97 if response == gtk.RESPONSE_YES:
98 import os
99 import os.path
100 layers = []
101 it = layer_store.get_iter_first()
102 while it:
103 layers.append(layer_store.get_value(it, 0))
104 it = layer_store.iter_next(it)
105
106 if not path:
107 msg += "it is an invalid path."
108 elif not os.path.exists(path+"/conf/layer.conf"):
109 msg += "there is no layer.conf inside the directory."
110 elif path in layers:
111 msg += "it is already in loaded layers."
112 else:
113 layer_store.append([path])
114 return
115 dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
116 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
117 response = dialog.run()
118 dialog.destroy()
119
120 def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
121 model, iter = tree_selection.get_selected()
122 if iter:
123 layer_store.remove(iter)
124
125
126 def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
127 hbox = gtk.HBox(False, 6)
128
129 layer_tv = gtk.TreeView()
130 layer_tv.set_rules_hint(True)
131 layer_tv.set_headers_visible(False)
132 tree_selection = layer_tv.get_selection()
133 tree_selection.set_mode(gtk.SELECTION_SINGLE)
134
135        # Enable drag and drop of rows, including row move
136 dnd_internal_target = ''
137 dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
138 layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
139 dnd_targets,
140 gtk.gdk.ACTION_MOVE)
141 layer_tv.enable_model_drag_dest(dnd_targets,
142 gtk.gdk.ACTION_MOVE)
143 layer_tv.connect("drag_data_get", self.drag_data_get_cb)
144 layer_tv.connect("drag_data_received", self.drag_data_received_cb)
145
146 col0= gtk.TreeViewColumn('Path')
147 cell0 = gtk.CellRendererText()
148 cell0.set_padding(5,2)
149 col0.pack_start(cell0, True)
150 col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
151 layer_tv.append_column(col0)
152
153 scroll = gtk.ScrolledWindow()
154 scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
155 scroll.set_shadow_type(gtk.SHADOW_IN)
156 scroll.add(layer_tv)
157
158 table_layer = gtk.Table(2, 10, False)
159 hbox.pack_start(table_layer, expand=True, fill=True)
160
161 table_layer.attach(scroll, 0, 10, 0, 1)
162
163 layer_store = gtk.ListStore(gobject.TYPE_STRING)
164 for layer in layers:
165 layer_store.append([layer])
166
167 col1 = gtk.TreeViewColumn('Enabled')
168 layer_tv.append_column(col1)
169
170 cell1 = CellRendererPixbufActivatable()
171 cell1.set_fixed_size(-1,35)
172 cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
173 col1.pack_start(cell1, True)
174 col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
175
176 add_button = gtk.Button()
177 add_button.set_relief(gtk.RELIEF_NONE)
178 box = gtk.HBox(False, 6)
179 box.show()
180 add_button.add(box)
181 add_button.connect("enter-notify-event", self.add_hover_cb)
182 add_button.connect("leave-notify-event", self.add_leave_cb)
183 self.im = gtk.Image()
184 self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
185 self.im.show()
186 box.pack_start(self.im, expand=False, fill=False, padding=6)
187 lbl = gtk.Label("Add layer")
188 lbl.set_alignment(0.0, 0.5)
189 lbl.show()
190 box.pack_start(lbl, expand=True, fill=True, padding=6)
191 add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
192 table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
193 layer_tv.set_model(layer_store)
194
195 hbox.show_all()
196
197 return hbox, layer_store
198
199 def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
200 treeselection = treeview.get_selection()
201 model, iter = treeselection.get_selected()
202 data = model.get_value(iter, 0)
203 selection.set(selection.target, 8, data)
204
205 def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
206 model = treeview.get_model()
207 data = selection.data
208 drop_info = treeview.get_dest_row_at_pos(x, y)
209 if drop_info:
210 path, position = drop_info
211 iter = model.get_iter(path)
212 if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
213 model.insert_before(iter, [data])
214 else:
215 model.insert_after(iter, [data])
216 else:
217 model.append([data])
218 if context.action == gtk.gdk.ACTION_MOVE:
219 context.finish(True, True, etime)
220 return
221
222 def add_hover_cb(self, button, event):
223 self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
224
225 def add_leave_cb(self, button, event):
226 self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
227
228 def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
229 super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
230
231 # class members from other objects
232 self.layers = layers
233 self.layers_non_removable = layers_non_removable
234 self.all_layers = all_layers
235 self.layers_changed = False
236
237 # icon for remove button in TreeView
238 im = gtk.Image()
239 im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
240 self.rem_icon = im.get_pixbuf()
241
242 # class members for internal use
243 self.layer_store = None
244
245 # create visual elements on the dialog
246 self.create_visual_elements()
247 self.connect("response", self.response_cb)
248
249 def create_visual_elements(self):
250 layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
251 layer_widget.set_size_request(450, 250)
252 self.vbox.pack_start(layer_widget, expand=True, fill=True)
253 self.show_all()
254
255 def response_cb(self, dialog, response_id):
256 model = self.layer_store
257 it = model.get_iter_first()
258 layers = []
259 while it:
260 layers.append(model.get_value(it, 0))
261 it = model.iter_next(it)
262
263 self.layers_changed = (self.layers != layers)
264 self.layers = layers
265
266 """
267 A custom cell_data_func to draw a delete 'button' in the TreeView for layers
268 other than the meta layer. The deletion of which is prevented so that the
269 user can't shoot themselves in the foot too badly.
270 """
271 def draw_delete_button_cb(self, col, cell, model, it, tv):
272 path = model.get_value(it, 0)
273 if path in self.layers_non_removable:
274 cell.set_sensitive(False)
275 cell.set_property('pixbuf', None)
276 cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
277 else:
278 cell.set_property('pixbuf', self.rem_icon)
279 cell.set_sensitive(True)
280 cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
281
282 return True
283
284 """
285 A custom cell_data_func to write an extra message into the layer path cell
286 for the meta layer. We should inform the user that they can't remove it for
287 their own safety.
288 """
289 def draw_layer_path_cb(self, col, cell, model, it):
290 path = model.get_value(it, 0)
291 if path in self.layers_non_removable:
292 cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
293 else:
294 cell.set_property('text', path)
295
296 def del_cell_clicked_cb(self, cell, path, model):
297 it = model.get_iter_from_string(path)
298 model.remove(it)
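layer_widget_add_clicked_cb() above accepts a directory as a new layer only if the path is valid, contains conf/layer.conf and is not already loaded. The same checks as a small standalone helper (names are illustrative):

import os

def layer_add_error(path, loaded_layers):
    """Return an error message, or None if the path is an acceptable layer."""
    if not path:
        return "it is an invalid path."
    if not os.path.exists(os.path.join(path, "conf", "layer.conf")):
        return "there is no layer.conf inside the directory."
    if path in loaded_layers:
        return "it is already in loaded layers."
    return None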
diff --git a/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py
new file mode 100644
index 0000000000..33bac39db8
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py
@@ -0,0 +1,163 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23from bb.ui.crumbs.hobwidget import HobAltButton
24from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
25
26"""
27The following are convenience classes for implementing GNOME HIG compliant
28BitBake GUIs.
29In summary: spacing = 12px, border-width = 6px
30"""
31
32#
33# ParsingWarningsDialog
34#
35class ParsingWarningsDialog (CrumbsDialog):
36
37 def __init__(self, title, warnings, parent, flags, buttons=None):
38 super(ParsingWarningsDialog, self).__init__(title, parent, flags, buttons)
39
40 self.warnings = warnings
41 self.warning_on = 0
42 self.warn_nb = len(warnings)
43
44 # create visual elements on the dialog
45 self.create_visual_elements()
46
47 def cancel_button_cb(self, button):
48 self.destroy()
49
50 def previous_button_cb(self, button):
51 self.warning_on = self.warning_on - 1
52 self.refresh_components()
53
54 def next_button_cb(self, button):
55 self.warning_on = self.warning_on + 1
56 self.refresh_components()
57
58 def refresh_components(self):
59 lbl = self.warnings[self.warning_on]
60 #when the warning text has more than 400 chars, it uses a scroll bar
61 if 0<= len(lbl) < 400:
62 self.warning_label.set_size_request(320, 230)
63 self.warning_label.set_use_markup(True)
64 self.warning_label.set_line_wrap(True)
65 self.warning_label.set_markup(lbl)
66 self.warning_label.set_property("yalign", 0.00)
67 else:
68 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
69 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
70 self.msgView = gtk.TextView()
71 self.msgView.set_editable(False)
72 self.msgView.set_wrap_mode(gtk.WRAP_WORD)
73 self.msgView.set_cursor_visible(False)
74 self.msgView.set_size_request(320, 230)
75 self.buf = gtk.TextBuffer()
76 self.buf.set_text(lbl)
77 self.msgView.set_buffer(self.buf)
78 self.textWindow.add(self.msgView)
79 self.msgView.show()
80
81 if self.warning_on==0:
82 self.previous_button.set_sensitive(False)
83 else:
84 self.previous_button.set_sensitive(True)
85
86 if self.warning_on==self.warn_nb-1:
87 self.next_button.set_sensitive(False)
88 else:
89 self.next_button.set_sensitive(True)
90
91 if self.warn_nb>1:
92 self.heading = "Warning " + str(self.warning_on + 1) + " of " + str(self.warn_nb)
93 self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
94 else:
95 self.heading = "Warning"
96 self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
97
98 self.show_all()
99
100 if 0<= len(lbl) < 400:
101 self.textWindow.hide()
102 else:
103 self.warning_label.hide()
104
105 def create_visual_elements(self):
106 self.set_size_request(350, 350)
107 self.heading_label = gtk.Label()
108 self.heading_label.set_alignment(0, 0)
109 self.warning_label = gtk.Label()
110 self.warning_label.set_selectable(True)
111 self.warning_label.set_alignment(0, 0)
112 self.textWindow = gtk.ScrolledWindow()
113
114 table = gtk.Table(1, 10, False)
115
116 cancel_button = gtk.Button()
117 cancel_button.set_label("Close")
118 cancel_button.connect("clicked", self.cancel_button_cb)
119 cancel_button.set_size_request(110, 30)
120
121 self.previous_button = gtk.Button()
122 image1 = gtk.image_new_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_BUTTON)
123 image1.show()
124 box = gtk.HBox(False, 6)
125 box.show()
126 self.previous_button.add(box)
127 lbl = gtk.Label("Previous")
128 lbl.show()
129 box.pack_start(image1, expand=False, fill=False, padding=3)
130 box.pack_start(lbl, expand=True, fill=True, padding=3)
131 self.previous_button.connect("clicked", self.previous_button_cb)
132 self.previous_button.set_size_request(110, 30)
133
134 self.next_button = gtk.Button()
135 image2 = gtk.image_new_from_stock(gtk.STOCK_GO_FORWARD, gtk.ICON_SIZE_BUTTON)
136 image2.show()
137 box = gtk.HBox(False, 6)
138 box.show()
139 self.next_button.add(box)
140 lbl = gtk.Label("Next")
141 lbl.show()
142 box.pack_start(lbl, expand=True, fill=True, padding=3)
143 box.pack_start(image2, expand=False, fill=False, padding=3)
144 self.next_button.connect("clicked", self.next_button_cb)
145 self.next_button.set_size_request(110, 30)
146
147 # When there is more than one warning, we need "Previous" and "Next" buttons
148 if self.warn_nb>1:
149 self.vbox.pack_start(self.heading_label, expand=False, fill=False)
150 self.vbox.pack_start(self.warning_label, expand=False, fill=False)
151 self.vbox.pack_start(self.textWindow, expand=False, fill=False)
152 table.attach(cancel_button, 6, 7, 0, 1, xoptions=gtk.SHRINK)
153 table.attach(self.previous_button, 7, 8, 0, 1, xoptions=gtk.SHRINK)
154 table.attach(self.next_button, 8, 9, 0, 1, xoptions=gtk.SHRINK)
155 self.vbox.pack_end(table, expand=False, fill=False)
156 else:
157 self.vbox.pack_start(self.heading_label, expand=False, fill=False)
158 self.vbox.pack_start(self.warning_label, expand=False, fill=False)
159 self.vbox.pack_start(self.textWindow, expand=False, fill=False)
160 cancel_button = self.add_button("Close", gtk.RESPONSE_CANCEL)
161 HobAltButton.style_button(cancel_button)
162
163 self.refresh_components()
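ParsingWarningsDialog pages through a list of warning strings, showing short warnings (under 400 characters) in a wrapped label and longer ones in a scrolled text view. A minimal usage sketch, assuming a plain GTK parent window and hypothetical warning text not taken from this patch:

    import gtk
    from bb.ui.crumbs.hig.parsingwarningsdialog import ParsingWarningsDialog

    warnings = ["<b>Warning 1:</b> example message",      # hypothetical content
                "Warning 2: a second, longer message"]
    dialog = ParsingWarningsDialog(title="Parsing warnings",
                                   warnings=warnings,
                                   parent=None,            # normally the Hob main window
                                   flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
    dialog.run()
    dialog.destroy()

With more than one warning the dialog packs its own Previous, Next and Close buttons, so no extra buttons need to be passed to the constructor.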
diff --git a/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py b/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
new file mode 100644
index 0000000000..09b9ce6de3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
@@ -0,0 +1,437 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2013 Intel Corporation
5#
6# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import string
22import gtk
23import gobject
24import os
25import tempfile
26import glib
27from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
28from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
31
32"""
33The following are convenience classes for implementing GNOME HIG compliant
34 BitBake GUIs
35In summary: spacing = 12px, border-width = 6px
36"""
37
38class PropertyDialog(CrumbsDialog):
39
40 def __init__(self, title, parent, information, flags, buttons=None):
41
42 super(PropertyDialog, self).__init__(title, parent, flags, buttons)
43
44 self.properties = information
45
46 if len(self.properties) == 10:
47 self.create_recipe_visual_elements()
48 elif len(self.properties) == 5:
49 self.create_package_visual_elements()
50 else:
51 self.create_information_visual_elements()
52
53
54 def create_information_visual_elements(self):
55
56 HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
57 ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
58
59 self.set_resizable(False)
60
61 self.table = gtk.Table(1,1,False)
62 self.table.set_row_spacings(0)
63 self.table.set_col_spacings(0)
64
65 self.image = gtk.Image()
66 self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
67 self.image.set_property("xalign",0)
68 #self.vbox.add(self.image)
69
70 image_info = self.properties.split("*")[0]
71 info = self.properties.split("*")[1]
72
73 vbox = gtk.VBox(True, spacing=30)
74
75 self.label_short = gtk.Label()
76 self.label_short.set_line_wrap(False)
77 self.label_short.set_markup(image_info)
78 self.label_short.set_property("xalign", 0)
79
80 self.info_label = gtk.Label()
81 self.info_label.set_line_wrap(True)
82 self.info_label.set_markup(info)
83 self.info_label.set_property("yalign", 0.5)
84
85 self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
86 self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
87 self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
88
89 self.vbox.add(self.table)
90 self.connect('delete-event', lambda w, e: self.destroy() or True)
91
92 def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
93 try:
94 (path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
95 it = widget.get_model().get_iter(path)
96 value = widget.get_model().get_value(it,cell)
97 if value in self.tooltip_items:
98 tooltips.set_tip(widget, self.tooltip_items[value])
99 tooltips.enable()
100 else:
101 tooltips.set_tip(widget, emptyText)
102 except:
103 tooltips.set_tip(widget, emptyText)
104
105
106 def create_package_visual_elements(self):
107
108 import json
109
110 name = self.properties['name']
111 binb = self.properties['binb']
112 size = self.properties['size']
113 recipe = self.properties['recipe']
114 file_list = json.loads(self.properties['files_list'])
115
116 files_temp = ''
117 paths_temp = ''
118 files_binb = []
119 paths_binb = []
120
121 self.tooltip_items = {}
122
123 self.set_resizable(False)
124
125 # Strip everything after '+' from the recipe name
126 recipe = recipe.split("+")[0]
127
128 vbox = gtk.VBox(True,spacing = 0)
129
130 ###################################### NAME ROW + COL #################################
131
132 self.label_short = gtk.Label()
133 self.label_short.set_size_request(300,-1)
134 self.label_short.set_selectable(True)
135 self.label_short.set_line_wrap(True)
136 self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
137 self.label_short.set_property("xalign", 0)
138
139 self.vbox.add(self.label_short)
140
141 ###################################### SIZE ROW + COL ######################################
142
143 self.label_short = gtk.Label()
144 self.label_short.set_size_request(300,-1)
145 self.label_short.set_selectable(True)
146 self.label_short.set_line_wrap(True)
147 self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
148 self.label_short.set_property("xalign", 0)
149
150 self.vbox.add(self.label_short)
151
152 ##################################### RECIPE ROW + COL #########################################
153
154 self.label_short = gtk.Label()
155 self.label_short.set_size_request(300,-1)
156 self.label_short.set_selectable(True)
157 self.label_short.set_line_wrap(True)
158 self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
159 self.label_short.set_property("xalign", 0)
160
161 self.vbox.add(self.label_short)
162
163 ##################################### BINB ROW + COL #######################################
164
165 if binb != '':
166 self.label_short = gtk.Label()
167 self.label_short.set_selectable(True)
168 self.label_short.set_line_wrap(True)
169 self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
170 self.label_short.set_property("xalign", 0)
171
172 self.label_info = gtk.Label()
173 self.label_info.set_size_request(300,-1)
174 self.label_info.set_selectable(True)
175 self.label_info.set_line_wrap(True)
176 self.label_info.set_markup(binb)
177 self.label_info.set_property("xalign", 0)
178
179 self.vbox.add(self.label_short)
180 self.vbox.add(self.label_info)
181
182 #################################### FILES BROUGHT BY PACKAGES ###################################
183
184 if file_list:
185
186 self.textWindow = gtk.ScrolledWindow()
187 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
188 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
189 self.textWindow.set_size_request(100, 170)
190
191 packagefiles_store = gtk.ListStore(str)
192
193 self.packagefiles_tv = gtk.TreeView()
194 self.packagefiles_tv.set_rules_hint(True)
195 self.packagefiles_tv.set_headers_visible(True)
196 self.textWindow.add(self.packagefiles_tv)
197
198 self.cell1 = gtk.CellRendererText()
199 col1 = gtk.TreeViewColumn('Package files', self.cell1)
200 col1.set_cell_data_func(self.cell1, self.regex_field)
201 self.packagefiles_tv.append_column(col1)
202
203 items = file_list.keys()
204 items.sort()
205 for item in items:
206 fullpath = item
207 while len(item) > 35:
208 item = item[:len(item)/2] + "" + item[len(item)/2+1:]
209 if len(item) == 35:
210 item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
211 self.tooltip_items[item] = fullpath
212
213 packagefiles_store.append([str(item)])
214
215 self.packagefiles_tv.set_model(packagefiles_store)
216
217 tips = gtk.Tooltips()
218 tips.set_tip(self.packagefiles_tv, "")
219 self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
220 self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
221
222 self.vbox.add(self.textWindow)
223
224 self.vbox.show_all()
225
226
227 def regex_field(self, column, cell, model, iter):
228 cell.set_property('text', model.get_value(iter, 0))
229 return
230
231
232 def create_recipe_visual_elements(self):
233
234 summary = self.properties['summary']
235 name = self.properties['name']
236 version = self.properties['version']
237 revision = self.properties['revision']
238 binb = self.properties['binb']
239 group = self.properties['group']
240 license = self.properties['license']
241 homepage = self.properties['homepage']
242 bugtracker = self.properties['bugtracker']
243 description = self.properties['description']
244
245 self.set_resizable(False)
246
247 # Clean up the version string and the summary, and default missing fields to 'unknown'
248 version = version.split(":")[1]
249 if len(version) > 30:
250 version = version.split("+")[0]
251 else:
252 version = version.split("-")[0]
253 license = license.replace("&" , "and")
254 if (homepage == ''):
255 homepage = 'unknown'
256 if (bugtracker == ''):
257 bugtracker = 'unknown'
258 summary = summary.split("+")[0]
259
260 # Split the "brought in by" list; its length determines the rows needed for the table
261 binb_items_count = len(binb.split(','))
262 binb_items = binb.split(',')
263
264 vbox = gtk.VBox(False,spacing = 0)
265
266 ######################################## SUMMARY LABEL #########################################
267
268 if summary != '':
269 self.label_short = gtk.Label()
270 self.label_short.set_width_chars(37)
271 self.label_short.set_selectable(True)
272 self.label_short.set_line_wrap(True)
273 self.label_short.set_markup("<b>" + summary + "</b>")
274 self.label_short.set_property("xalign", 0)
275
276 self.vbox.add(self.label_short)
277
278 ########################################## NAME ROW + COL #######################################
279
280 self.label_short = gtk.Label()
281 self.label_short.set_selectable(True)
282 self.label_short.set_line_wrap(True)
283 self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
284 self.label_short.set_property("xalign", 0)
285
286 self.vbox.add(self.label_short)
287
288 ####################################### VERSION ROW + COL ####################################
289
290 self.label_short = gtk.Label()
291 self.label_short.set_selectable(True)
292 self.label_short.set_line_wrap(True)
293 self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
294 self.label_short.set_property("xalign", 0)
295
296 self.vbox.add(self.label_short)
297
298 ##################################### REVISION ROW + COL #####################################
299
300 self.label_short = gtk.Label()
301 self.label_short.set_line_wrap(True)
302 self.label_short.set_selectable(True)
303 self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
304 self.label_short.set_property("xalign", 0)
305
306 self.vbox.add(self.label_short)
307
308 ################################## GROUP ROW + COL ############################################
309
310 self.label_short = gtk.Label()
311 self.label_short.set_selectable(True)
312 self.label_short.set_line_wrap(True)
313 self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
314 self.label_short.set_property("xalign", 0)
315
316 self.vbox.add(self.label_short)
317
318 ################################# HOMEPAGE ROW + COL ############################################
319
320 if homepage != 'unknown':
321 self.label_info = gtk.Label()
322 self.label_info.set_selectable(True)
323 self.label_info.set_line_wrap(True)
324 if len(homepage) > 35:
325 self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
326 else:
327 self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
328
329 self.label_info.set_property("xalign", 0)
330
331 self.label_short = gtk.Label()
332 self.label_short.set_selectable(True)
333 self.label_short.set_line_wrap(True)
334 self.label_short.set_markup("<b>Homepage: </b>")
335 self.label_short.set_property("xalign", 0)
336
337 self.vbox.add(self.label_short)
338 self.vbox.add(self.label_info)
339
340 ################################# BUGTRACKER ROW + COL ###########################################
341
342 if bugtracker != 'unknown':
343 self.label_info = gtk.Label()
344 self.label_info.set_selectable(True)
345 self.label_info.set_line_wrap(True)
346 if len(bugtracker) > 35:
347 self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
348 else:
349 self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
350 self.label_info.set_property("xalign", 0)
351
352 self.label_short = gtk.Label()
353 self.label_short.set_selectable(True)
354 self.label_short.set_line_wrap(True)
355 self.label_short.set_markup("<b>Bugtracker: </b>")
356 self.label_short.set_property("xalign", 0)
357
358 self.vbox.add(self.label_short)
359 self.vbox.add(self.label_info)
360
361 ################################# LICENSE ROW + COL ############################################
362
363 self.label_info = gtk.Label()
364 self.label_info.set_selectable(True)
365 self.label_info.set_line_wrap(True)
366 self.label_info.set_markup(license)
367 self.label_info.set_property("xalign", 0)
368
369 self.label_short = gtk.Label()
370 self.label_short.set_selectable(True)
371 self.label_short.set_line_wrap(True)
372 self.label_short.set_markup("<span weight=\"bold\">License: </span>")
373 self.label_short.set_property("xalign", 0)
374
375 self.vbox.add(self.label_short)
376 self.vbox.add(self.label_info)
377
378 ################################### BINB ROW+COL #############################################
379
380 if binb != '':
381 self.label_short = gtk.Label()
382 self.label_short.set_selectable(True)
383 self.label_short.set_line_wrap(True)
384 self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
385 self.label_short.set_property("xalign", 0)
386 self.vbox.add(self.label_short)
387 self.label_info = gtk.Label()
388 self.label_info.set_selectable(True)
389 self.label_info.set_width_chars(36)
390 if len(binb) > 200:
391 scrolled_window = gtk.ScrolledWindow()
392 scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
393 scrolled_window.set_size_request(100,100)
394 self.label_info.set_markup(binb)
395 self.label_info.set_padding(6,6)
396 self.label_info.set_alignment(0,0)
397 self.label_info.set_line_wrap(True)
398 scrolled_window.add_with_viewport(self.label_info)
399 self.vbox.add(scrolled_window)
400 else:
401 self.label_info.set_markup(binb)
402 self.label_info.set_property("xalign", 0)
403 self.label_info.set_line_wrap(True)
404 self.vbox.add(self.label_info)
405
406 ################################ DESCRIPTION TAG ROW #################################################
407
408 self.label_short = gtk.Label()
409 self.label_short.set_line_wrap(True)
410 self.label_short.set_markup("<span weight=\"bold\">Description </span>")
411 self.label_short.set_property("xalign", 0)
412 self.vbox.add(self.label_short)
413
414 ################################ DESCRIPTION INFORMATION ROW ##########################################
415
416 hbox = gtk.HBox(True,spacing = 0)
417
418 self.label_short = gtk.Label()
419 self.label_short.set_selectable(True)
420 self.label_short.set_width_chars(36)
421 if len(description) > 200:
422 scrolled_window = gtk.ScrolledWindow()
423 scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
424 scrolled_window.set_size_request(100,100)
425 self.label_short.set_markup(description)
426 self.label_short.set_padding(6,6)
427 self.label_short.set_alignment(0,0)
428 self.label_short.set_line_wrap(True)
429 scrolled_window.add_with_viewport(self.label_short)
430 self.vbox.add(scrolled_window)
431 else:
432 self.label_short.set_markup(description)
433 self.label_short.set_property("xalign", 0)
434 self.label_short.set_line_wrap(True)
435 self.vbox.add(self.label_short)
436
437 self.vbox.show_all()
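PropertyDialog decides which view to build from the shape of the information it receives: a ten-key dictionary produces the recipe view, a five-key dictionary the package view, and anything else the plain information view. A hedged sketch of the recipe case, with placeholder values for the ten keys that create_recipe_visual_elements() reads (none of these values come from the patch):

    import gtk
    from bb.ui.crumbs.hig.propertydialog import PropertyDialog

    recipe_props = {                      # hypothetical recipe metadata
        'summary': 'Example recipe summary',
        'name': 'example-recipe',
        'version': 'pe:1.0-r0',           # must contain ':', as the dialog splits on it
        'revision': 'r0',
        'binb': '',
        'group': 'console/utils',
        'license': 'MIT',
        'homepage': '',
        'bugtracker': '',
        'description': 'A short description of the recipe.',
    }
    dialog = PropertyDialog(title='Properties',
                            parent=None,
                            information=recipe_props,
                            flags=gtk.DIALOG_DESTROY_WITH_PARENT)
    dialog.run()
    dialog.destroy()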
diff --git a/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py
new file mode 100644
index 0000000000..69e7dffb6d
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py
@@ -0,0 +1,90 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
25
26"""
27The following are convenience classes for implementing GNOME HIG compliant
28 BitBake GUIs
29In summary: spacing = 12px, border-width = 6px
30"""
31
32class ProxyDetailsDialog (CrumbsDialog):
33
34 def __init__(self, title, user, passwd, parent, flags, buttons=None):
35 super(ProxyDetailsDialog, self).__init__(title, parent, flags, buttons)
36 self.connect("response", self.response_cb)
37
38 self.auth = not (user == None or passwd == None or user == "")
39 self.user = user or ""
40 self.passwd = passwd or ""
41
42 # create visual elements on the dialog
43 self.create_visual_elements()
44
45 def create_visual_elements(self):
46 self.auth_checkbox = gtk.CheckButton("Use authentication")
47 self.auth_checkbox.set_tooltip_text("Check this box to set the username and the password")
48 self.auth_checkbox.set_active(self.auth)
49 self.auth_checkbox.connect("toggled", self.auth_checkbox_toggled_cb)
50 self.vbox.pack_start(self.auth_checkbox, expand=False, fill=False)
51
52 hbox = gtk.HBox(False, 6)
53 self.user_label = gtk.Label("Username:")
54 self.user_text = gtk.Entry()
55 self.user_text.set_text(self.user)
56 hbox.pack_start(self.user_label, expand=False, fill=False)
57 hbox.pack_end(self.user_text, expand=False, fill=False)
58 self.vbox.pack_start(hbox, expand=False, fill=False)
59
60 hbox = gtk.HBox(False, 6)
61 self.passwd_label = gtk.Label("Password:")
62 self.passwd_text = gtk.Entry()
63 self.passwd_text.set_text(self.passwd)
64 hbox.pack_start(self.passwd_label, expand=False, fill=False)
65 hbox.pack_end(self.passwd_text, expand=False, fill=False)
66 self.vbox.pack_start(hbox, expand=False, fill=False)
67
68 self.refresh_auth_components()
69 self.show_all()
70
71 def refresh_auth_components(self):
72 self.user_label.set_sensitive(self.auth)
73 self.user_text.set_editable(self.auth)
74 self.user_text.set_sensitive(self.auth)
75 self.passwd_label.set_sensitive(self.auth)
76 self.passwd_text.set_editable(self.auth)
77 self.passwd_text.set_sensitive(self.auth)
78
79 def auth_checkbox_toggled_cb(self, button):
80 self.auth = self.auth_checkbox.get_active()
81 self.refresh_auth_components()
82
83 def response_cb(self, dialog, response_id):
84 if response_id == gtk.RESPONSE_OK:
85 if self.auth:
86 self.user = self.user_text.get_text()
87 self.passwd = self.passwd_text.get_text()
88 else:
89 self.user = None
90 self.passwd = None
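ProxyDetailsDialog only captures credentials; the caller supplies the response buttons and reads .user and .passwd back afterwards. A minimal sketch with hypothetical credentials and a plain parent (not taken from this patch):

    import gtk
    from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog

    dialog = ProxyDetailsDialog(title="Proxy details",
                                user="builder",            # hypothetical values
                                passwd="secret",
                                parent=None,
                                flags=gtk.DIALOG_MODAL)
    dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
    dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
    if dialog.run() == gtk.RESPONSE_OK:
        # Both fields are None when "Use authentication" is left unchecked.
        user, passwd = dialog.user, dialog.passwd
    dialog.destroy()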
diff --git a/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py
new file mode 100644
index 0000000000..9017139850
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py
@@ -0,0 +1,51 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22
23class RetrieveImageDialog (gtk.FileChooserDialog):
24 """
25 This class creates a dialog that lets the user retrieve
26 a custom image previously saved from Hob.
27 """
28 def __init__(self, directory, title, parent, flags, buttons=None):
29 super(RetrieveImageDialog, self).__init__(title, None, gtk.FILE_CHOOSER_ACTION_OPEN,
30 (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN, gtk.RESPONSE_OK))
31 self.directory = directory
32
33 # create visual elements on the dialog
34 self.create_visual_elements()
35
36 def create_visual_elements(self):
37 self.set_show_hidden(True)
38 self.set_default_response(gtk.RESPONSE_OK)
39 self.set_current_folder(self.directory)
40
41 vbox = self.get_children()[0].get_children()[0].get_children()[0]
42 for child in vbox.get_children()[0].get_children()[0].get_children()[0].get_children():
43 vbox.get_children()[0].get_children()[0].get_children()[0].remove(child)
44
45 label1 = gtk.Label()
46 label1.set_text("File system" + self.directory)
47 label1.show()
48 vbox.get_children()[0].get_children()[0].get_children()[0].pack_start(label1, expand=False, fill=False, padding=0)
49 vbox.get_children()[0].get_children()[1].get_children()[0].hide()
50
51 self.get_children()[0].get_children()[1].get_children()[0].set_label("Select")
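RetrieveImageDialog is a thin gtk.FileChooserDialog wrapper that relabels parts of the stock chooser by walking its internal child widgets, so it is tied to the GTK+ 2 widget layout. A usage sketch with a hypothetical images directory (not taken from this patch):

    import gtk
    from bb.ui.crumbs.hig.retrieveimagedialog import RetrieveImageDialog

    dialog = RetrieveImageDialog(directory="/home/user/images",   # hypothetical path
                                 title="Select from my image recipes",
                                 parent=None,
                                 flags=gtk.DIALOG_MODAL)
    if dialog.run() == gtk.RESPONSE_OK:
        chosen = dialog.get_filename()    # path of the selected image recipe
    dialog.destroy()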
diff --git a/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py
new file mode 100644
index 0000000000..4195f70e1e
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py
@@ -0,0 +1,159 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import glib
23from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
24from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
25from bb.ui.crumbs.hobwidget import HobButton
26
27class SaveImageDialog (CrumbsDialog):
28 """
29 This class creates a dialog that lets the user save
30 a custom image in a predefined directory.
31 """
32 def __init__(self, directory, name, description, title, parent, flags, buttons=None):
33 super(SaveImageDialog, self).__init__(title, parent, flags, buttons)
34 self.directory = directory
35 self.builder = parent
36 self.name_field = name
37 self.description_field = description
38
39 # create visual elements on the dialog
40 self.create_visual_elements()
41
42 def create_visual_elements(self):
43 self.set_default_response(gtk.RESPONSE_OK)
44 self.vbox.set_border_width(6)
45
46 sub_vbox = gtk.VBox(False, 12)
47 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
48 label = gtk.Label()
49 label.set_alignment(0, 0)
50 label.set_markup("<b>Name</b>")
51 sub_label = gtk.Label()
52 sub_label.set_alignment(0, 0)
53 content = "Image recipe names should be all lowercase and include only alphanumeric\n"
54 content += "characters. The only special character you can use is the ASCII hyphen (-)."
55 sub_label.set_markup(content)
56 self.name_entry = gtk.Entry()
57 self.name_entry.set_text(self.name_field)
58 self.name_entry.set_size_request(350,30)
59 self.name_entry.connect("changed", self.name_entry_changed)
60 sub_vbox.pack_start(label, expand=False, fill=False)
61 sub_vbox.pack_start(sub_label, expand=False, fill=False)
62 sub_vbox.pack_start(self.name_entry, expand=False, fill=False)
63
64 sub_vbox = gtk.VBox(False, 12)
65 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
66 label = gtk.Label()
67 label.set_alignment(0, 0)
68 label.set_markup("<b>Description</b> (optional)")
69 sub_label = gtk.Label()
70 sub_label.set_alignment(0, 0)
71 sub_label.set_markup("The description should be less than 150 characters long.")
72 self.description_entry = gtk.TextView()
73 description_buffer = self.description_entry.get_buffer()
74 description_buffer.set_text(self.description_field)
75 description_buffer.connect("insert-text", self.limit_description_length)
76 self.description_entry.set_wrap_mode(gtk.WRAP_WORD)
77 self.description_entry.set_size_request(350,50)
78 sub_vbox.pack_start(label, expand=False, fill=False)
79 sub_vbox.pack_start(sub_label, expand=False, fill=False)
80 sub_vbox.pack_start(self.description_entry, expand=False, fill=False)
81
82 sub_vbox = gtk.VBox(False, 12)
83 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
84 label = gtk.Label()
85 label.set_alignment(0, 0)
86 label.set_markup("Your image recipe will be saved to:")
87 sub_label = gtk.Label()
88 sub_label.set_alignment(0, 0)
89 sub_label.set_markup(self.directory)
90 sub_vbox.pack_start(label, expand=False, fill=False)
91 sub_vbox.pack_start(sub_label, expand=False, fill=False)
92
93 table = gtk.Table(1, 4, True)
94
95 cancel_button = gtk.Button()
96 cancel_button.set_label("Cancel")
97 cancel_button.connect("clicked", self.cancel_button_cb)
98 cancel_button.set_size_request(110, 30)
99
100 self.save_button = gtk.Button()
101 self.save_button.set_label("Save")
102 self.save_button.connect("clicked", self.save_button_cb)
103 self.save_button.set_size_request(110, 30)
104 if self.name_entry.get_text() == '':
105 self.save_button.set_sensitive(False)
106
107 table.attach(cancel_button, 2, 3, 0, 1)
108 table.attach(self.save_button, 3, 4, 0, 1)
109 self.vbox.pack_end(table, expand=False, fill=False)
110
111 self.show_all()
112
113 def limit_description_length(self, textbuffer, iter, text, length):
114 buffer_bounds = textbuffer.get_bounds()
115 entire_text = textbuffer.get_text(*buffer_bounds)
116 entire_text += text
117 if len(entire_text)>150 or text=="\n":
118 textbuffer.emit_stop_by_name("insert-text")
119
120 def name_entry_changed(self, entry):
121 text = entry.get_text()
122 if text == '':
123 self.save_button.set_sensitive(False)
124 else:
125 self.save_button.set_sensitive(True)
126
127 def cancel_button_cb(self, button):
128 self.destroy()
129
130 def save_button_cb(self, button):
131 text = self.name_entry.get_text()
132 new_text = text.replace("-","")
133 description_buffer = self.description_entry.get_buffer()
134 description = description_buffer.get_text(description_buffer.get_start_iter(),description_buffer.get_end_iter())
135 if new_text.islower() and new_text.isalnum():
136 self.builder.image_details_page.image_saved = True
137 self.builder.customized = False
138 self.builder.generate_new_image(self.directory+text, description)
139 self.builder.recipe_model.set_in_list(text, description)
140 self.builder.recipe_model.set_selected_image(text)
141 self.builder.image_details_page.show_page(self.builder.IMAGE_GENERATED)
142 self.builder.image_details_page.name_field_template = text
143 self.builder.image_details_page.description_field_template = description
144 self.destroy()
145 else:
146 self.show_invalid_input_error_dialog()
147
148 def show_invalid_input_error_dialog(self):
149 lbl = "<b>Invalid characters in image recipe name</b>"
150 msg = "Image recipe names should be all lowercase and\n"
151 msg += "include only alphanumeric characters. The only\n"
152 msg += "special character you can use is the ASCII hyphen (-)."
153 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
154 button = dialog.add_button("Close", gtk.RESPONSE_OK)
155 HobButton.style_button(button)
156
157 res = dialog.run()
158 self.name_entry.grab_focus()
159 dialog.destroy()
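The save path hinges on one validation rule: after removing hyphens, the name must be lowercase and alphanumeric. Extracted here for illustration only (this helper does not exist in the patch; save_button_cb applies the check inline):

    def is_valid_image_recipe_name(text):
        # Same check as save_button_cb: lowercase alphanumerics, with the
        # ASCII hyphen as the only allowed special character.
        stripped = text.replace("-", "")
        return stripped.islower() and stripped.isalnum()

    is_valid_image_recipe_name("core-image-custom")   # True
    is_valid_image_recipe_name("Core_Image")          # False

A name that fails the check triggers show_invalid_input_error_dialog() instead of saving.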
diff --git a/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py b/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
new file mode 100644
index 0000000000..e0285c93ce
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
@@ -0,0 +1,122 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import os
25from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
26
27"""
28The following are convenience classes for implementing GNOME HIG compliant
29 BitBake GUIs
30In summary: spacing = 12px, border-width = 6px
31"""
32
33class SettingsUIHelper():
34
35 def gen_label_widget(self, content):
36 label = gtk.Label()
37 label.set_alignment(0, 0)
38 label.set_markup(content)
39 label.show()
40 return label
41
42 def gen_label_info_widget(self, content, tooltip):
43 table = gtk.Table(1, 10, False)
44 label = self.gen_label_widget(content)
45 info = HobInfoButton(tooltip, self)
46 table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
47 table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
48 return table
49
50 def gen_spinner_widget(self, content, lower, upper, tooltip=""):
51 hbox = gtk.HBox(False, 12)
52 adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
53 spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
54
55 spinner.set_value(content)
56 hbox.pack_start(spinner, expand=False, fill=False)
57
58 info = HobInfoButton(tooltip, self)
59 hbox.pack_start(info, expand=False, fill=False)
60
61 hbox.show_all()
62 return hbox, spinner
63
64 def gen_combo_widget(self, curr_item, all_item, tooltip=""):
65 hbox = gtk.HBox(False, 12)
66 combo = gtk.combo_box_new_text()
67 hbox.pack_start(combo, expand=False, fill=False)
68
69 index = 0
70 for item in all_item or []:
71 combo.append_text(item)
72 if item == curr_item:
73 combo.set_active(index)
74 index += 1
75
76 info = HobInfoButton(tooltip, self)
77 hbox.pack_start(info, expand=False, fill=False)
78
79 hbox.show_all()
80 return hbox, combo
81
82 def entry_widget_select_path_cb(self, action, parent, entry):
83 dialog = gtk.FileChooserDialog("", parent,
84 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
85 text = entry.get_text()
86 dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
87 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
88 HobAltButton.style_button(button)
89 button = dialog.add_button("Open", gtk.RESPONSE_YES)
90 HobButton.style_button(button)
91 response = dialog.run()
92 if response == gtk.RESPONSE_YES:
93 path = dialog.get_filename()
94 entry.set_text(path)
95
96 dialog.destroy()
97
98 def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
99 hbox = gtk.HBox(False, 12)
100 entry = gtk.Entry()
101 entry.set_text(content)
102 entry.set_size_request(350,30)
103
104 if need_button:
105 table = gtk.Table(1, 10, False)
106 hbox.pack_start(table, expand=True, fill=True)
107 table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
108 image = gtk.Image()
109 image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
110 open_button = gtk.Button()
111 open_button.set_image(image)
112 open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
113 table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
114 else:
115 hbox.pack_start(entry, expand=True, fill=True)
116
117 if tooltip != "":
118 info = HobInfoButton(tooltip, self)
119 hbox.pack_start(info, expand=False, fill=False)
120
121 hbox.show_all()
122 return hbox, entry
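SettingsUIHelper is a mixin of widget factories rather than a dialog in its own right; the dialogs below combine it with CrumbsDialog and pack the returned containers themselves. A hedged sketch of that pattern (the ExampleSettingsDialog class and the downloads path are hypothetical; SimpleSettingsDialog below is the real consumer):

    import gtk
    from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
    from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper

    class ExampleSettingsDialog(CrumbsDialog, SettingsUIHelper):
        def __init__(self, title, parent, flags):
            super(ExampleSettingsDialog, self).__init__(title, parent, flags, None)
            self.vbox.pack_start(self.gen_label_widget("<b>Downloads directory</b>"),
                                 expand=False, fill=False)
            # need_button=False skips the file-chooser button and its parent plumbing
            hbox, self.dldir_entry = self.gen_entry_widget("/home/user/downloads", self,
                                                           tooltip="", need_button=False)
            self.vbox.pack_start(hbox, expand=False, fill=False)
            self.show_all()

The spinner, combo and entry helpers return both the packed container and the underlying input widget, so callers keep a handle for reading the value back later.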
diff --git a/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
new file mode 100644
index 0000000000..ab5b614c8d
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
@@ -0,0 +1,894 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import hashlib
26from bb.ui.crumbs.hobwidget import hic, HobInfoButton, HobButton, HobAltButton
27from bb.ui.crumbs.progressbar import HobProgressBar
28from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
29from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
30from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
31from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog
32
33"""
34The following are convenience classes for implementing GNOME HIG compliant
35 BitBake GUIs
36In summary: spacing = 12px, border-width = 6px
37"""
38
39class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
40
41 (BUILD_ENV_PAGE_ID,
42 SHARED_STATE_PAGE_ID,
43 PROXIES_PAGE_ID,
44 OTHERS_PAGE_ID) = range(4)
45
46 (TEST_NETWORK_NONE,
47 TEST_NETWORK_INITIAL,
48 TEST_NETWORK_RUNNING,
49 TEST_NETWORK_PASSED,
50 TEST_NETWORK_FAILED,
51 TEST_NETWORK_CANCELED) = range(6)
52
53 TARGETS = [
54 ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
55 ("text/plain", 0, 1),
56 ("TEXT", 0, 2),
57 ("STRING", 0, 3),
58 ]
59
60 def __init__(self, title, configuration, all_image_types,
61 all_package_formats, all_distros, all_sdk_machines,
62 max_threads, parent, flags, handler, buttons=None):
63 super(SimpleSettingsDialog, self).__init__(title, parent, flags, buttons)
64
65 # class members from other objects
66 # bitbake settings from Builder.Configuration
67 self.configuration = configuration
68 self.image_types = all_image_types
69 self.all_package_formats = all_package_formats
70 self.all_distros = all_distros
71 self.all_sdk_machines = all_sdk_machines
72 self.max_threads = max_threads
73
74 # class members for internal use
75 self.dldir_text = None
76 self.sstatedir_text = None
77 self.sstatemirrors_list = []
78 self.sstatemirrors_changed = 0
79 self.bb_spinner = None
80 self.pmake_spinner = None
81 self.rootfs_size_spinner = None
82 self.extra_size_spinner = None
83 self.gplv3_checkbox = None
84 self.toolchain_checkbox = None
85 self.setting_store = None
86 self.image_types_checkbuttons = {}
87
88 self.md5 = self.config_md5()
89 self.proxy_md5 = self.config_proxy_md5()
90 self.settings_changed = False
91 self.proxy_settings_changed = False
92 self.handler = handler
93 self.proxy_test_ran = False
94 self.selected_mirror_row = 0
95 self.new_mirror = False
96
97 # create visual elements on the dialog
98 self.create_visual_elements()
99 self.connect("response", self.response_cb)
100
101 def _get_sorted_value(self, var):
102 return " ".join(sorted(str(var).split())) + "\n"
103
104 def config_proxy_md5(self):
105 data = ("ENABLE_PROXY: " + self._get_sorted_value(self.configuration.enable_proxy))
106 if self.configuration.enable_proxy:
107 for protocol in self.configuration.proxies.keys():
108 data += (protocol + ": " + self._get_sorted_value(self.configuration.combine_proxy(protocol)))
109 return hashlib.md5(data).hexdigest()
110
111 def config_md5(self):
112 data = ""
113 for key in self.configuration.extra_setting.keys():
114 data += (key + ": " + self._get_sorted_value(self.configuration.extra_setting[key]))
115 return hashlib.md5(data).hexdigest()
116
117 def gen_proxy_entry_widget(self, protocol, parent, need_button=True, line=0):
118 label = gtk.Label(protocol.upper() + " proxy")
119 self.proxy_table.attach(label, 0, 1, line, line+1, xpadding=24)
120
121 proxy_entry = gtk.Entry()
122 proxy_entry.set_size_request(300, -1)
123 self.proxy_table.attach(proxy_entry, 1, 2, line, line+1, ypadding=4)
124
125 self.proxy_table.attach(gtk.Label(":"), 2, 3, line, line+1, xpadding=12, ypadding=4)
126
127 port_entry = gtk.Entry()
128 port_entry.set_size_request(60, -1)
129 self.proxy_table.attach(port_entry, 3, 4, line, line+1, ypadding=4)
130
131 details_button = HobAltButton("Details")
132 details_button.connect("clicked", self.details_cb, parent, protocol)
133 self.proxy_table.attach(details_button, 4, 5, line, line+1, xpadding=4, yoptions=gtk.EXPAND)
134
135 return proxy_entry, port_entry, details_button
136
137 def refresh_proxy_components(self):
138 self.same_checkbox.set_sensitive(self.configuration.enable_proxy)
139
140 self.http_proxy.set_text(self.configuration.combine_host_only("http"))
141 self.http_proxy.set_editable(self.configuration.enable_proxy)
142 self.http_proxy.set_sensitive(self.configuration.enable_proxy)
143 self.http_proxy_port.set_text(self.configuration.combine_port_only("http"))
144 self.http_proxy_port.set_editable(self.configuration.enable_proxy)
145 self.http_proxy_port.set_sensitive(self.configuration.enable_proxy)
146 self.http_proxy_details.set_sensitive(self.configuration.enable_proxy)
147
148 self.https_proxy.set_text(self.configuration.combine_host_only("https"))
149 self.https_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
150 self.https_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
151 self.https_proxy_port.set_text(self.configuration.combine_port_only("https"))
152 self.https_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
153 self.https_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
154 self.https_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
155
156 self.ftp_proxy.set_text(self.configuration.combine_host_only("ftp"))
157 self.ftp_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
158 self.ftp_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
159 self.ftp_proxy_port.set_text(self.configuration.combine_port_only("ftp"))
160 self.ftp_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
161 self.ftp_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
162 self.ftp_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
163
164 self.socks_proxy.set_text(self.configuration.combine_host_only("socks"))
165 self.socks_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
166 self.socks_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
167 self.socks_proxy_port.set_text(self.configuration.combine_port_only("socks"))
168 self.socks_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
169 self.socks_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
170 self.socks_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
171
172 self.cvs_proxy.set_text(self.configuration.combine_host_only("cvs"))
173 self.cvs_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
174 self.cvs_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
175 self.cvs_proxy_port.set_text(self.configuration.combine_port_only("cvs"))
176 self.cvs_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
177 self.cvs_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
178 self.cvs_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
179
180 if self.configuration.same_proxy:
181 if self.http_proxy.get_text():
182 [w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
183 if self.http_proxy_port.get_text():
184 [w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]
185
186 def proxy_checkbox_toggled_cb(self, button):
187 self.configuration.enable_proxy = self.proxy_checkbox.get_active()
188 if not self.configuration.enable_proxy:
189 self.configuration.same_proxy = False
190 self.same_checkbox.set_active(self.configuration.same_proxy)
191 self.save_proxy_data()
192 self.refresh_proxy_components()
193
194 def same_checkbox_toggled_cb(self, button):
195 self.configuration.same_proxy = self.same_checkbox.get_active()
196 self.save_proxy_data()
197 self.refresh_proxy_components()
198
199 def save_proxy_data(self):
200 self.configuration.split_proxy("http", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
201 if self.configuration.same_proxy:
202 self.configuration.split_proxy("https", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
203 self.configuration.split_proxy("ftp", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
204 self.configuration.split_proxy("socks", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
205 self.configuration.split_proxy("cvs", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
206 else:
207 self.configuration.split_proxy("https", self.https_proxy.get_text() + ":" + self.https_proxy_port.get_text())
208 self.configuration.split_proxy("ftp", self.ftp_proxy.get_text() + ":" + self.ftp_proxy_port.get_text())
209 self.configuration.split_proxy("socks", self.socks_proxy.get_text() + ":" + self.socks_proxy_port.get_text())
210 self.configuration.split_proxy("cvs", self.cvs_proxy.get_text() + ":" + self.cvs_proxy_port.get_text())
211
212 def response_cb(self, dialog, response_id):
213 if response_id == gtk.RESPONSE_YES:
214 if self.proxy_checkbox.get_active():
215 # Check that all proxy entries have a corresponding port
216 for proxy, port in zip(self.all_proxy_addresses, self.all_proxy_ports):
217 if proxy.get_text() and not port.get_text():
218 lbl = "<b>Enter all port numbers</b>"
219 msg = "Proxy servers require a port number. Please make sure you have entered a port number for each proxy server."
220 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
221 button = dialog.add_button("Close", gtk.RESPONSE_OK)
222 HobButton.style_button(button)
223 response = dialog.run()
224 dialog.destroy()
225 self.emit_stop_by_name("response")
226 return
227
228 self.configuration.dldir = self.dldir_text.get_text()
229 self.configuration.sstatedir = self.sstatedir_text.get_text()
230 self.configuration.sstatemirror = ""
231 for mirror in self.sstatemirrors_list:
232 if mirror[1] != "" and mirror[2].startswith("file://"):
233 if mirror[1].endswith("\\1"):
234 smirror = mirror[2] + " " + mirror[1] + " \\n "
235 else:
236 smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
237 self.configuration.sstatemirror += smirror
238 self.configuration.bbthread = self.bb_spinner.get_value_as_int()
239 self.configuration.pmake = self.pmake_spinner.get_value_as_int()
240 self.save_proxy_data()
241 self.configuration.extra_setting = {}
242 it = self.setting_store.get_iter_first()
243 while it:
244 key = self.setting_store.get_value(it, 0)
245 value = self.setting_store.get_value(it, 1)
246 self.configuration.extra_setting[key] = value
247 it = self.setting_store.iter_next(it)
248
249 md5 = self.config_md5()
250 self.settings_changed = (self.md5 != md5)
251 self.proxy_settings_changed = (self.proxy_md5 != self.config_proxy_md5())
252
253 def create_build_environment_page(self):
254 advanced_vbox = gtk.VBox(False, 6)
255 advanced_vbox.set_border_width(6)
256
257 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Parallel threads</span>'), expand=False, fill=False)
258 sub_vbox = gtk.VBox(False, 6)
259 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
260 label = self.gen_label_widget("BitBake parallel threads")
261 tooltip = "Sets the maximum number of tasks that BitBake can run simultaneously. See the <a href=\""
262 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
263 tooltip += "poky-ref-manual.html#var-BB_NUMBER_THREADS\">Poky reference manual</a> for information"
264 bbthread_widget, self.bb_spinner = self.gen_spinner_widget(self.configuration.bbthread, 1, self.max_threads,"<b>BitBake parallel threads</b>" + "*" + tooltip)
265 sub_vbox.pack_start(label, expand=False, fill=False)
266 sub_vbox.pack_start(bbthread_widget, expand=False, fill=False)
267
268 sub_vbox = gtk.VBox(False, 6)
269 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
270 label = self.gen_label_widget("Make parallel threads")
271 tooltip = "Sets the maximum number of threads the host can use during the build. See the <a href=\""
272 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
273 tooltip += "poky-ref-manual.html#var-PARALLEL_MAKE\">Poky reference manual</a> for information"
274 pmake_widget, self.pmake_spinner = self.gen_spinner_widget(self.configuration.pmake, 1, self.max_threads,"<b>Make parallel threads</b>" + "*" + tooltip)
275 sub_vbox.pack_start(label, expand=False, fill=False)
276 sub_vbox.pack_start(pmake_widget, expand=False, fill=False)
277
278 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Downloaded source code</span>'), expand=False, fill=False)
279 sub_vbox = gtk.VBox(False, 6)
280 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
281 label = self.gen_label_widget("Downloads directory")
282 tooltip = "Select a folder that caches the upstream project source code"
283 dldir_widget, self.dldir_text = self.gen_entry_widget(self.configuration.dldir, self,"<b>Downloaded source code</b>" + "*" + tooltip)
284 sub_vbox.pack_start(label, expand=False, fill=False)
285 sub_vbox.pack_start(dldir_widget, expand=False, fill=False)
286
287 return advanced_vbox
288
289 def create_shared_state_page(self):
290 advanced_vbox = gtk.VBox(False)
291 advanced_vbox.set_border_width(12)
292
293 sub_vbox = gtk.VBox(False)
294 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False, padding=24)
295 content = "<span>Shared state directory</span>"
296 tooltip = "Select a folder that caches your prebuilt results"
297 label = self.gen_label_info_widget(content,"<b>Shared state directory</b>" + "*" + tooltip)
298 sstatedir_widget, self.sstatedir_text = self.gen_entry_widget(self.configuration.sstatedir, self)
299 sub_vbox.pack_start(label, expand=False, fill=False)
300 sub_vbox.pack_start(sstatedir_widget, expand=False, fill=False, padding=6)
301
302 content = "<span weight=\"bold\">Shared state mirrors</span>"
303 tooltip = "URLs pointing to pre-built mirrors that will speed your build. "
304 tooltip += "Select the \'Standard\' configuration if the structure of your "
305 tooltip += "mirror replicates the structure of your local shared state directory. "
306 tooltip += "For more information on shared state mirrors, check the <a href=\""
307 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
308 tooltip += "poky-ref-manual.html#shared-state\">Yocto Project Reference Manual</a>."
309 table = self.gen_label_info_widget(content,"<b>Shared state mirrors</b>" + "*" + tooltip)
310 advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)
311
312 sub_vbox = gtk.VBox(False)
313 advanced_vbox.pack_start(sub_vbox, gtk.TRUE, gtk.TRUE, 0)
314
315 if self.sstatemirrors_changed == 0:
316 self.sstatemirrors_changed = 1
317 sstatemirrors = self.configuration.sstatemirror
318 if sstatemirrors == "":
319 sm_list = ["Standard", "", "file://(.*)"]
320 self.sstatemirrors_list.append(sm_list)
321 else:
322 sstatemirrors = [x for x in sstatemirrors.split('\\n')]
323 for sstatemirror in sstatemirrors:
324 sstatemirror_fields = [x for x in sstatemirror.split(' ') if x.strip()]
325 if len(sstatemirror_fields) == 2:
326 if sstatemirror_fields[0] == "file://(.*)" or sstatemirror_fields[0] == "file://.*":
327 sm_list = ["Standard", sstatemirror_fields[1], sstatemirror_fields[0]]
328 else:
329 sm_list = ["Custom", sstatemirror_fields[1], sstatemirror_fields[0]]
330 self.sstatemirrors_list.append(sm_list)
331
332 sstatemirrors_widget, sstatemirrors_store = self.gen_shared_sstate_widget(self.sstatemirrors_list, self)
333 sub_vbox.pack_start(sstatemirrors_widget, expand=True, fill=True)
334
335 table = gtk.Table(1, 10, False)
336 table.set_col_spacings(6)
337 add_mirror_button = HobAltButton("Add mirror")
338 add_mirror_button.connect("clicked", self.add_mirror)
339 add_mirror_button.set_size_request(120,30)
340 table.attach(add_mirror_button, 1, 2, 0, 1, xoptions=gtk.SHRINK)
341
342 self.delete_button = HobAltButton("Delete mirror")
343 self.delete_button.connect("clicked", self.delete_cb)
344 self.delete_button.set_size_request(120, 30)
345 table.attach(self.delete_button, 3, 4, 0, 1, xoptions=gtk.SHRINK)
346
347 advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)
348
349 return advanced_vbox
350
351 def gen_shared_sstate_widget(self, sstatemirrors_list, window):
352 hbox = gtk.HBox(False)
353
354 sstatemirrors_store = gtk.ListStore(str, str, str)
355 for sstatemirror in sstatemirrors_list:
356 sstatemirrors_store.append(sstatemirror)
357
358 self.sstatemirrors_tv = gtk.TreeView()
359 self.sstatemirrors_tv.set_rules_hint(True)
360 self.sstatemirrors_tv.set_headers_visible(True)
361 tree_selection = self.sstatemirrors_tv.get_selection()
362 tree_selection.set_mode(gtk.SELECTION_SINGLE)
363
364 # Enable drag and drop of rows, including row moves
365 self.sstatemirrors_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
366 self.TARGETS,
367 gtk.gdk.ACTION_DEFAULT|
368 gtk.gdk.ACTION_MOVE)
369 self.sstatemirrors_tv.enable_model_drag_dest(self.TARGETS,
370 gtk.gdk.ACTION_DEFAULT)
371 self.sstatemirrors_tv.connect("drag_data_get", self.drag_data_get_cb)
372 self.sstatemirrors_tv.connect("drag_data_received", self.drag_data_received_cb)
373
374
375 self.scroll = gtk.ScrolledWindow()
376 self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
377 self.scroll.set_shadow_type(gtk.SHADOW_IN)
378 self.scroll.connect('size-allocate', self.scroll_changed)
379 self.scroll.add(self.sstatemirrors_tv)
380
381 #list store for cell renderer
382 m = gtk.ListStore(gobject.TYPE_STRING)
383 m.append(["Standard"])
384 m.append(["Custom"])
385
386 cell0 = gtk.CellRendererCombo()
387 cell0.set_property("model",m)
388 cell0.set_property("text-column", 0)
389 cell0.set_property("editable", True)
390 cell0.set_property("has-entry", False)
391 col0 = gtk.TreeViewColumn("Configuration")
392 col0.pack_start(cell0, False)
393 col0.add_attribute(cell0, "text", 0)
394 col0.set_cell_data_func(cell0, self.configuration_field)
395 self.sstatemirrors_tv.append_column(col0)
396
397 cell0.connect("edited", self.combo_changed, sstatemirrors_store)
398
399 self.cell1 = gtk.CellRendererText()
400 self.cell1.set_padding(5,2)
401 col1 = gtk.TreeViewColumn('Regex', self.cell1)
402 col1.set_cell_data_func(self.cell1, self.regex_field)
403 self.sstatemirrors_tv.append_column(col1)
404
405 self.cell1.connect("edited", self.regex_changed, sstatemirrors_store)
406
407 cell2 = gtk.CellRendererText()
408 cell2.set_padding(5,2)
409 cell2.set_property("editable", True)
410 col2 = gtk.TreeViewColumn('URL', cell2)
411 col2.set_cell_data_func(cell2, self.url_field)
412 self.sstatemirrors_tv.append_column(col2)
413
414 cell2.connect("edited", self.url_changed, sstatemirrors_store)
415
416 self.sstatemirrors_tv.set_model(sstatemirrors_store)
417 self.sstatemirrors_tv.set_cursor(self.selected_mirror_row)
418 hbox.pack_start(self.scroll, expand=True, fill=True)
419 hbox.show_all()
420
421 return hbox, sstatemirrors_store
422
423 def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
424 treeselection = treeview.get_selection()
425 model, iter = treeselection.get_selected()
426 data = model.get_string_from_iter(iter)
427 selection.set(selection.target, 8, data)
428
429 def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
430 model = treeview.get_model()
431 data = []
432 tree_iter = model.get_iter_from_string(selection.data)
433 data.append(model.get_value(tree_iter, 0))
434 data.append(model.get_value(tree_iter, 1))
435 data.append(model.get_value(tree_iter, 2))
436
437 drop_info = treeview.get_dest_row_at_pos(x, y)
438 if drop_info:
439 path, position = drop_info
440 iter = model.get_iter(path)
441 if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
442 model.insert_before(iter, data)
443 else:
444 model.insert_after(iter, data)
445 else:
446 model.append(data)
447 if context.action == gtk.gdk.ACTION_MOVE:
448 context.finish(True, True, etime)
449 return
450
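    # How the row move above works: drag_data_get_cb serialises the dragged
    # row as its tree-path string; drag_data_received_cb looks that row up
    # again, copies its three columns and inserts the copy before or after the
    # drop target (or appends it when dropped on empty space).
    # context.finish(True, True, etime) then asks GTK to delete the source
    # row, so the net effect is a move rather than a copy.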
451 def delete_cb(self, button):
452 selection = self.sstatemirrors_tv.get_selection()
453 tree_model, tree_iter = selection.get_selected()
454 index = int(tree_model.get_string_from_iter(tree_iter))
455 if index == 0:
456 self.selected_mirror_row = index
457 else:
458 self.selected_mirror_row = index - 1
459 self.sstatemirrors_list.pop(index)
460 self.refresh_shared_state_page()
461 if not self.sstatemirrors_list:
462 self.delete_button.set_sensitive(False)
463
464 def add_mirror(self, button):
465 self.new_mirror = True
466 tooltip = "Select the pre-built mirror that will speed your build"
467 index = len(self.sstatemirrors_list)
468 self.selected_mirror_row = index
469 sm_list = ["Standard", "", "file://(.*)"]
470 self.sstatemirrors_list.append(sm_list)
471 self.refresh_shared_state_page()
472
473 def scroll_changed(self, widget, event, data=None):
474 if self.new_mirror == True:
475 adj = widget.get_vadjustment()
476 adj.set_value(adj.upper - adj.page_size)
477 self.new_mirror = False
478
479 def combo_changed(self, widget, path, text, model):
480 model[path][0] = text
481 selection = self.sstatemirrors_tv.get_selection()
482 tree_model, tree_iter = selection.get_selected()
483 index = int(tree_model.get_string_from_iter(tree_iter))
484 self.sstatemirrors_list[index][0] = text
485
486 def regex_changed(self, cell, path, new_text, user_data):
487 user_data[path][2] = new_text
488 selection = self.sstatemirrors_tv.get_selection()
489 tree_model, tree_iter = selection.get_selected()
490 index = int(tree_model.get_string_from_iter(tree_iter))
491 self.sstatemirrors_list[index][2] = new_text
492 return
493
494 def url_changed(self, cell, path, new_text, user_data):
495 if new_text!="Enter the mirror URL" and new_text!="Match regex and replace it with this URL":
496 user_data[path][1] = new_text
497 selection = self.sstatemirrors_tv.get_selection()
498 tree_model, tree_iter = selection.get_selected()
499 index = int(tree_model.get_string_from_iter(tree_iter))
500 self.sstatemirrors_list[index][1] = new_text
501 return
502
503 def configuration_field(self, column, cell, model, iter):
504 cell.set_property('text', model.get_value(iter, 0))
505 if model.get_value(iter, 0) == "Standard":
506 self.cell1.set_property("sensitive", False)
507 self.cell1.set_property("editable", False)
508 else:
509 self.cell1.set_property("sensitive", True)
510 self.cell1.set_property("editable", True)
511 return
512
513 def regex_field(self, column, cell, model, iter):
514 cell.set_property('text', model.get_value(iter, 2))
515 return
516
517 def url_field(self, column, cell, model, iter):
518 text = model.get_value(iter, 1)
519 if text == "":
520 if model.get_value(iter, 0) == "Standard":
521 text = "Enter the mirror URL"
522 else:
523 text = "Match regex and replace it with this URL"
524 cell.set_property('text', text)
525 return
526
527 def refresh_shared_state_page(self):
528 page_num = self.nb.get_current_page()
529        self.nb.remove_page(page_num)
530        self.nb.insert_page(self.create_shared_state_page(), gtk.Label("Shared state"), page_num)
531 self.show_all()
532 self.nb.set_current_page(page_num)
533
534 def test_proxy_ended(self, passed):
535 self.proxy_test_running = False
536 self.set_test_proxy_state(self.TEST_NETWORK_PASSED if passed else self.TEST_NETWORK_FAILED)
537 self.set_sensitive(True)
538 self.refresh_proxy_components()
539
540 def timer_func(self):
541 self.test_proxy_progress.pulse()
542 return self.proxy_test_running
543
544 def test_network_button_cb(self, b):
545 self.set_test_proxy_state(self.TEST_NETWORK_RUNNING)
546 self.set_sensitive(False)
547 self.save_proxy_data()
548 if self.configuration.enable_proxy == True:
549 self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
550 self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
551 self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
552 self.handler.set_socks_proxy(self.configuration.combine_proxy("socks"))
553 self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
554 elif self.configuration.enable_proxy == False:
555 self.handler.set_http_proxy("")
556 self.handler.set_https_proxy("")
557 self.handler.set_ftp_proxy("")
558 self.handler.set_socks_proxy("")
559 self.handler.set_cvs_proxy("", "")
560 self.proxy_test_ran = True
561 self.proxy_test_running = True
562 gobject.timeout_add(100, self.timer_func)
563 self.handler.trigger_network_test()
564
565 def test_proxy_focus_event(self, w, direction):
566 if self.test_proxy_state in [self.TEST_NETWORK_PASSED, self.TEST_NETWORK_FAILED]:
567 self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
568 return False
569
570 def http_proxy_changed(self, e):
571 if not self.configuration.same_proxy:
572 return
573 if e == self.http_proxy:
574 [w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
575 else:
576 [w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]
577
578 def proxy_address_focus_out_event(self, w, direction):
579 text = w.get_text()
580 if not text:
581 return False
582 if text.find("//") == -1:
583 w.set_text("http://" + text)
584 return False
585
586 def set_test_proxy_state(self, state):
587 if self.test_proxy_state == state:
588 return
589 [self.proxy_table.remove(w) for w in self.test_gui_elements]
590 if state == self.TEST_NETWORK_INITIAL:
591 self.proxy_table.attach(self.test_network_button, 1, 2, 5, 6)
592 self.test_network_button.show()
593 elif state == self.TEST_NETWORK_RUNNING:
594 self.test_proxy_progress.set_rcstyle("running")
595 self.test_proxy_progress.set_text("Testing network configuration")
596 self.proxy_table.attach(self.test_proxy_progress, 0, 5, 5, 6, xpadding=4)
597 self.test_proxy_progress.show()
598 else: # passed or failed
599 self.dummy_progress.update(1.0)
600 if state == self.TEST_NETWORK_PASSED:
601 self.dummy_progress.set_text("Your network is properly configured")
602 self.dummy_progress.set_rcstyle("running")
603 else:
604 self.dummy_progress.set_text("Network test failed")
605 self.dummy_progress.set_rcstyle("fail")
606 self.proxy_table.attach(self.dummy_progress, 0, 4, 5, 6)
607 self.proxy_table.attach(self.retest_network_button, 4, 5, 5, 6, xpadding=4)
608 self.dummy_progress.show()
609 self.retest_network_button.show()
610 self.test_proxy_state = state
611
612 def create_network_page(self):
613 advanced_vbox = gtk.VBox(False, 6)
614 advanced_vbox.set_border_width(6)
615 self.same_proxy_addresses = []
616 self.same_proxy_ports = []
617 self.all_proxy_ports = []
618 self.all_proxy_addresses = []
619
620 sub_vbox = gtk.VBox(False, 6)
621 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
622 label = self.gen_label_widget("<span weight=\"bold\">Set the proxies used when fetching source code</span>")
623 tooltip = "Set the proxies used when fetching source code. A blank field uses a direct internet connection."
624 info = HobInfoButton("<span weight=\"bold\">Set the proxies used when fetching source code</span>" + "*" + tooltip, self)
625 hbox = gtk.HBox(False, 12)
626 hbox.pack_start(label, expand=True, fill=True)
627 hbox.pack_start(info, expand=False, fill=False)
628 sub_vbox.pack_start(hbox, expand=False, fill=False)
629
630 proxy_test_focus = []
631 self.direct_checkbox = gtk.RadioButton(None, "Direct network connection")
632 proxy_test_focus.append(self.direct_checkbox)
633 self.direct_checkbox.set_tooltip_text("Check this box to use a direct internet connection with no proxy")
634 self.direct_checkbox.set_active(not self.configuration.enable_proxy)
635 sub_vbox.pack_start(self.direct_checkbox, expand=False, fill=False)
636
637 self.proxy_checkbox = gtk.RadioButton(self.direct_checkbox, "Manual proxy configuration")
638 proxy_test_focus.append(self.proxy_checkbox)
639 self.proxy_checkbox.set_tooltip_text("Check this box to manually set up a specific proxy")
640 self.proxy_checkbox.set_active(self.configuration.enable_proxy)
641 sub_vbox.pack_start(self.proxy_checkbox, expand=False, fill=False)
642
643 self.same_checkbox = gtk.CheckButton("Use the HTTP proxy for all protocols")
644 proxy_test_focus.append(self.same_checkbox)
645 self.same_checkbox.set_tooltip_text("Check this box to use the HTTP proxy for all five proxies")
646 self.same_checkbox.set_active(self.configuration.same_proxy)
647 hbox = gtk.HBox(False, 12)
648 hbox.pack_start(self.same_checkbox, expand=False, fill=False, padding=24)
649 sub_vbox.pack_start(hbox, expand=False, fill=False)
650
651 self.proxy_table = gtk.Table(6, 5, False)
652 self.http_proxy, self.http_proxy_port, self.http_proxy_details = self.gen_proxy_entry_widget(
653 "http", self, True, 0)
654 proxy_test_focus +=[self.http_proxy, self.http_proxy_port]
655 self.http_proxy.connect("changed", self.http_proxy_changed)
656 self.http_proxy_port.connect("changed", self.http_proxy_changed)
657
658 self.https_proxy, self.https_proxy_port, self.https_proxy_details = self.gen_proxy_entry_widget(
659 "https", self, True, 1)
660 proxy_test_focus += [self.https_proxy, self.https_proxy_port]
661 self.same_proxy_addresses.append(self.https_proxy)
662 self.same_proxy_ports.append(self.https_proxy_port)
663
664 self.ftp_proxy, self.ftp_proxy_port, self.ftp_proxy_details = self.gen_proxy_entry_widget(
665 "ftp", self, True, 2)
666 proxy_test_focus += [self.ftp_proxy, self.ftp_proxy_port]
667 self.same_proxy_addresses.append(self.ftp_proxy)
668 self.same_proxy_ports.append(self.ftp_proxy_port)
669
670 self.socks_proxy, self.socks_proxy_port, self.socks_proxy_details = self.gen_proxy_entry_widget(
671 "socks", self, True, 3)
672 proxy_test_focus += [self.socks_proxy, self.socks_proxy_port]
673 self.same_proxy_addresses.append(self.socks_proxy)
674 self.same_proxy_ports.append(self.socks_proxy_port)
675
676 self.cvs_proxy, self.cvs_proxy_port, self.cvs_proxy_details = self.gen_proxy_entry_widget(
677 "cvs", self, True, 4)
678 proxy_test_focus += [self.cvs_proxy, self.cvs_proxy_port]
679 self.same_proxy_addresses.append(self.cvs_proxy)
680 self.same_proxy_ports.append(self.cvs_proxy_port)
681 self.all_proxy_ports = self.same_proxy_ports + [self.http_proxy_port]
682 self.all_proxy_addresses = self.same_proxy_addresses + [self.http_proxy]
683 sub_vbox.pack_start(self.proxy_table, expand=False, fill=False)
684 self.proxy_table.show_all()
685
686 # Create the graphical elements for the network test feature, but don't display them yet
687 self.test_network_button = HobAltButton("Test network configuration")
688 self.test_network_button.connect("clicked", self.test_network_button_cb)
689 self.test_proxy_progress = HobProgressBar()
690 self.dummy_progress = HobProgressBar()
691 self.retest_network_button = HobAltButton("Retest")
692 self.retest_network_button.connect("clicked", self.test_network_button_cb)
693 self.test_gui_elements = [self.test_network_button, self.test_proxy_progress, self.dummy_progress, self.retest_network_button]
694 # Initialize the network tester
695 self.test_proxy_state = self.TEST_NETWORK_NONE
696 self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
697 self.proxy_test_passed_id = self.handler.connect("network-passed", lambda h:self.test_proxy_ended(True))
698 self.proxy_test_failed_id = self.handler.connect("network-failed", lambda h:self.test_proxy_ended(False))
699 [w.connect("focus-in-event", self.test_proxy_focus_event) for w in proxy_test_focus]
700 [w.connect("focus-out-event", self.proxy_address_focus_out_event) for w in self.all_proxy_addresses]
701
702 self.direct_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
703 self.proxy_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
704 self.same_checkbox.connect("toggled", self.same_checkbox_toggled_cb)
705
706 self.refresh_proxy_components()
707 return advanced_vbox
708
709 def switch_to_page(self, page_id):
710 self.nb.set_current_page(page_id)
711
712 def details_cb(self, button, parent, protocol):
713 self.save_proxy_data()
714 dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
715 user = self.configuration.proxies[protocol][1],
716 passwd = self.configuration.proxies[protocol][2],
717 parent = parent,
718 flags = gtk.DIALOG_MODAL
719 | gtk.DIALOG_DESTROY_WITH_PARENT
720 | gtk.DIALOG_NO_SEPARATOR)
721 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
722 response = dialog.run()
723 if response == gtk.RESPONSE_OK:
724 self.configuration.proxies[protocol][1] = dialog.user
725 self.configuration.proxies[protocol][2] = dialog.passwd
726 self.refresh_proxy_components()
727 dialog.destroy()
728
729 def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
730 combo_item = self.rootfs_combo.get_active_text()
731 for child in check_hbox.get_children():
732 if isinstance(child, gtk.CheckButton):
733 check_hbox.remove(child)
734 for format in all_package_format:
735 if format != combo_item:
736 check_button = gtk.CheckButton(format)
737 check_hbox.pack_start(check_button, expand=False, fill=False)
738 check_hbox.show_all()
739
740 def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
741 pkgfmt_hbox = gtk.HBox(False, 24)
742
743 rootfs_vbox = gtk.VBox(False, 6)
744 pkgfmt_hbox.pack_start(rootfs_vbox, expand=False, fill=False)
745
746 label = self.gen_label_widget("Root file system package format")
747 rootfs_vbox.pack_start(label, expand=False, fill=False)
748
749 rootfs_format = ""
750 if curr_package_format:
751 rootfs_format = curr_package_format.split()[0]
752
753 rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
754 rootfs_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)
755
756 extra_vbox = gtk.VBox(False, 6)
757 pkgfmt_hbox.pack_start(extra_vbox, expand=False, fill=False)
758
759 label = self.gen_label_widget("Additional package formats")
760 extra_vbox.pack_start(label, expand=False, fill=False)
761
762 check_hbox = gtk.HBox(False, 12)
763 extra_vbox.pack_start(check_hbox, expand=False, fill=False)
764 for format in all_package_format:
765 if format != rootfs_format:
766 check_button = gtk.CheckButton(format)
767 is_active = (format in curr_package_format.split())
768 check_button.set_active(is_active)
769 check_hbox.pack_start(check_button, expand=False, fill=False)
770
771 info = HobInfoButton(tooltip_extra, self)
772 check_hbox.pack_end(info, expand=False, fill=False)
773
774 rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)
775
776 pkgfmt_hbox.show_all()
777
778 return pkgfmt_hbox, rootfs_combo, check_hbox
779
780 def editable_settings_cell_edited(self, cell, path_string, new_text, model):
781 it = model.get_iter_from_string(path_string)
782 column = cell.get_data("column")
783 model.set(it, column, new_text)
784
785 def editable_settings_add_item_clicked(self, button, model):
786 new_item = ["##KEY##", "##VALUE##"]
787
788 iter = model.append()
789 model.set (iter,
790 0, new_item[0],
791 1, new_item[1],
792 )
793
794 def editable_settings_remove_item_clicked(self, button, treeview):
795 selection = treeview.get_selection()
796 model, iter = selection.get_selected()
797
798 if iter:
799 path = model.get_path(iter)[0]
800 model.remove(iter)
801
802 def gen_editable_settings(self, setting, tooltip=""):
803 setting_hbox = gtk.HBox(False, 12)
804
805 vbox = gtk.VBox(False, 12)
806 setting_hbox.pack_start(vbox, expand=True, fill=True)
807
808 setting_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
809 for key in setting.keys():
810 setting_store.set(setting_store.append(), 0, key, 1, setting[key])
811
812 setting_tree = gtk.TreeView(setting_store)
813 setting_tree.set_headers_visible(True)
814 setting_tree.set_size_request(300, 100)
815
816 col = gtk.TreeViewColumn('Key')
817 col.set_min_width(100)
818 col.set_max_width(150)
819 col.set_resizable(True)
820 col1 = gtk.TreeViewColumn('Value')
821 col1.set_min_width(100)
822 col1.set_max_width(150)
823 col1.set_resizable(True)
824 setting_tree.append_column(col)
825 setting_tree.append_column(col1)
826 cell = gtk.CellRendererText()
827 cell.set_property('width-chars', 10)
828 cell.set_property('editable', True)
829 cell.set_data("column", 0)
830 cell.connect("edited", self.editable_settings_cell_edited, setting_store)
831 cell1 = gtk.CellRendererText()
832 cell1.set_property('width-chars', 10)
833 cell1.set_property('editable', True)
834 cell1.set_data("column", 1)
835 cell1.connect("edited", self.editable_settings_cell_edited, setting_store)
836 col.pack_start(cell, True)
837 col1.pack_end(cell1, True)
838 col.set_attributes(cell, text=0)
839 col1.set_attributes(cell1, text=1)
840
841 scroll = gtk.ScrolledWindow()
842 scroll.set_shadow_type(gtk.SHADOW_IN)
843 scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
844 scroll.add(setting_tree)
845 vbox.pack_start(scroll, expand=True, fill=True)
846
847 # some buttons
848 hbox = gtk.HBox(True, 6)
849 vbox.pack_start(hbox, False, False)
850
851 button = gtk.Button(stock=gtk.STOCK_ADD)
852 button.connect("clicked", self.editable_settings_add_item_clicked, setting_store)
853 hbox.pack_start(button)
854
855 button = gtk.Button(stock=gtk.STOCK_REMOVE)
856 button.connect("clicked", self.editable_settings_remove_item_clicked, setting_tree)
857 hbox.pack_start(button)
858
859 info = HobInfoButton(tooltip, self)
860 setting_hbox.pack_start(info, expand=False, fill=False)
861
862 return setting_hbox, setting_store
863
864 def create_others_page(self):
865 advanced_vbox = gtk.VBox(False, 6)
866 advanced_vbox.set_border_width(6)
867
868 sub_vbox = gtk.VBox(False, 6)
869 advanced_vbox.pack_start(sub_vbox, expand=True, fill=True)
870 label = self.gen_label_widget("<span weight=\"bold\">Add your own variables:</span>")
871 tooltip = "These are key/value pairs for your extra settings. Click \'Add\' and then directly edit the key and the value"
872 setting_widget, self.setting_store = self.gen_editable_settings(self.configuration.extra_setting,"<b>Add your own variables</b>" + "*" + tooltip)
873 sub_vbox.pack_start(label, expand=False, fill=False)
874 sub_vbox.pack_start(setting_widget, expand=True, fill=True)
875
876 return advanced_vbox
877
878 def create_visual_elements(self):
879 self.nb = gtk.Notebook()
880 self.nb.set_show_tabs(True)
881 self.nb.append_page(self.create_build_environment_page(), gtk.Label("Build environment"))
882 self.nb.append_page(self.create_shared_state_page(), gtk.Label("Shared state"))
883 self.nb.append_page(self.create_network_page(), gtk.Label("Network"))
884 self.nb.append_page(self.create_others_page(), gtk.Label("Others"))
885 self.nb.set_current_page(0)
886 self.vbox.pack_start(self.nb, expand=True, fill=True)
887 self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)
888
889 self.show_all()
890
891 def destroy(self):
892 self.handler.disconnect(self.proxy_test_passed_id)
893 self.handler.disconnect(self.proxy_test_failed_id)
894 super(SimpleSettingsDialog, self).destroy()
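
The network test wiring above (test_network_button_cb, timer_func and
test_proxy_ended) uses the usual PyGTK idiom of pulsing a progress bar from a
gobject timeout until an asynchronous result arrives. Below is a minimal,
self-contained sketch of that idiom, assuming a working PyGTK install; the
class and widget names are illustrative and are not part of the patch.

    import gtk
    import gobject

    class PulseDemo(gtk.Window):
        def __init__(self):
            super(PulseDemo, self).__init__()
            self.progress = gtk.ProgressBar()
            self.add(self.progress)
            self.running = True
            # timeout_add() re-invokes the callback every 100 ms for as long
            # as it returns True, which is how timer_func() keeps pulsing
            # while proxy_test_running is set
            gobject.timeout_add(100, self.pulse)
            # stand-in for the "network-passed"/"network-failed" signals that
            # normally end the test
            gobject.timeout_add(3000, self.finish)

        def pulse(self):
            self.progress.pulse()
            return self.running

        def finish(self):
            self.running = False
            self.progress.set_fraction(1.0)
            return False

    if __name__ == "__main__":
        win = PulseDemo()
        win.connect("destroy", gtk.main_quit)
        win.show_all()
        gtk.main()
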
diff --git a/bitbake/lib/bb/ui/crumbs/hobcolor.py b/bitbake/lib/bb/ui/crumbs/hobcolor.py
new file mode 100644
index 0000000000..3316542a20
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobcolor.py
@@ -0,0 +1,38 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# Authored by Shane Wang <shane.wang@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21class HobColors:
22 WHITE = "#ffffff"
23 PALE_GREEN = "#aaffaa"
24 ORANGE = "#eb8e68"
25 PALE_RED = "#ffaaaa"
26 GRAY = "#aaaaaa"
27 LIGHT_GRAY = "#dddddd"
28 SLIGHT_DARK = "#5f5f5f"
29 DARK = "#3c3b37"
30 BLACK = "#000000"
31 PALE_BLUE = "#53b8ff"
32 DEEP_RED = "#aa3e3e"
33 KHAKI = "#fff68f"
34
35 OK = WHITE
36 RUNNING = PALE_GREEN
37 WARNING = ORANGE
38 ERROR = PALE_RED
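
HobColors above is a plain palette; call sites elsewhere in Hob pick one of
these constants according to task or build state and apply it as a row or cell
background. A small illustrative helper showing that kind of lookup (the state
names in the mapping are an assumption for demonstration only):

    from bb.ui.crumbs.hobcolor import HobColors

    def state_to_color(state):
        # only the color constants come from HobColors; the keys are made up
        colors = {
            "idle": HobColors.OK,
            "running": HobColors.RUNNING,
            "warning": HobColors.WARNING,
            "error": HobColors.ERROR,
        }
        return colors.get(state, HobColors.WHITE)

    print state_to_color("warning")    # -> "#eb8e68"
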
diff --git a/bitbake/lib/bb/ui/crumbs/hobeventhandler.py b/bitbake/lib/bb/ui/crumbs/hobeventhandler.py
new file mode 100644
index 0000000000..43edb70b08
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobeventhandler.py
@@ -0,0 +1,639 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gobject
23import logging
24import ast
25from bb.ui.crumbs.runningbuild import RunningBuild
26
27class HobHandler(gobject.GObject):
28
29 """
30    This object handles BitBake events for the Hob GUI.
31 """
32 __gsignals__ = {
33 "package-formats-updated" : (gobject.SIGNAL_RUN_LAST,
34 gobject.TYPE_NONE,
35 (gobject.TYPE_PYOBJECT,)),
36 "config-updated" : (gobject.SIGNAL_RUN_LAST,
37 gobject.TYPE_NONE,
38 (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
39 "command-succeeded" : (gobject.SIGNAL_RUN_LAST,
40 gobject.TYPE_NONE,
41 (gobject.TYPE_INT,)),
42 "command-failed" : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 (gobject.TYPE_STRING,)),
45 "parsing-warning" : (gobject.SIGNAL_RUN_LAST,
46 gobject.TYPE_NONE,
47 (gobject.TYPE_STRING,)),
48 "sanity-failed" : (gobject.SIGNAL_RUN_LAST,
49 gobject.TYPE_NONE,
50 (gobject.TYPE_STRING, gobject.TYPE_INT)),
51 "generating-data" : (gobject.SIGNAL_RUN_LAST,
52 gobject.TYPE_NONE,
53 ()),
54 "data-generated" : (gobject.SIGNAL_RUN_LAST,
55 gobject.TYPE_NONE,
56 ()),
57 "parsing-started" : (gobject.SIGNAL_RUN_LAST,
58 gobject.TYPE_NONE,
59 (gobject.TYPE_PYOBJECT,)),
60 "parsing" : (gobject.SIGNAL_RUN_LAST,
61 gobject.TYPE_NONE,
62 (gobject.TYPE_PYOBJECT,)),
63 "parsing-completed" : (gobject.SIGNAL_RUN_LAST,
64 gobject.TYPE_NONE,
65 (gobject.TYPE_PYOBJECT,)),
66 "recipe-populated" : (gobject.SIGNAL_RUN_LAST,
67 gobject.TYPE_NONE,
68 ()),
69 "package-populated" : (gobject.SIGNAL_RUN_LAST,
70 gobject.TYPE_NONE,
71 ()),
72 "network-passed" : (gobject.SIGNAL_RUN_LAST,
73 gobject.TYPE_NONE,
74 ()),
75 "network-failed" : (gobject.SIGNAL_RUN_LAST,
76 gobject.TYPE_NONE,
77 ()),
78 }
79
80 (GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO, SANITY_CHECK, NETWORK_TEST) = range(7)
81 (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_SANITY_CHECK,
82 SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE, SUB_NETWORK_TEST) = range(12)
83
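    # The first tuple above enumerates the top-level operations a caller can
    # request; the matching value is emitted back via "command-succeeded" once
    # the whole queue has drained. The second tuple enumerates the asynchronous
    # sub-steps that run_next_command() pops off commands_async one at a time.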
84 def __init__(self, server, recipe_model, package_model):
85 super(HobHandler, self).__init__()
86
87 self.build = RunningBuild(sequential=True)
88
89 self.recipe_model = recipe_model
90 self.package_model = package_model
91
92 self.commands_async = []
93 self.generating = False
94 self.current_phase = None
95 self.building = False
96 self.recipe_queue = []
97 self.package_queue = []
98
99 self.server = server
100 self.error_msg = ""
101 self.initcmd = None
102 self.parsing = False
103
104 def set_busy(self):
105 if not self.generating:
106 self.emit("generating-data")
107 self.generating = True
108
109 def clear_busy(self):
110 if self.generating:
111 self.emit("data-generated")
112 self.generating = False
113
114 def runCommand(self, commandline):
115 try:
116 result, error = self.server.runCommand(commandline)
117 if error:
118 raise Exception("Error running command '%s': %s" % (commandline, error))
119 return result
120 except Exception as e:
121 self.commands_async = []
122 self.clear_busy()
123 self.emit("command-failed", "Hob Exception - %s" % (str(e)))
124 return None
125
126 def run_next_command(self, initcmd=None):
127 if initcmd != None:
128 self.initcmd = initcmd
129
130 if self.commands_async:
131 self.set_busy()
132 next_command = self.commands_async.pop(0)
133 else:
134 self.clear_busy()
135 if self.initcmd != None:
136 self.emit("command-succeeded", self.initcmd)
137 return
138
139 if next_command == self.SUB_PATH_LAYERS:
140 self.runCommand(["findConfigFilePath", "bblayers.conf"])
141 elif next_command == self.SUB_FILES_DISTRO:
142 self.runCommand(["findConfigFiles", "DISTRO"])
143 elif next_command == self.SUB_FILES_MACH:
144 self.runCommand(["findConfigFiles", "MACHINE"])
145 elif next_command == self.SUB_FILES_SDKMACH:
146 self.runCommand(["findConfigFiles", "MACHINE-SDK"])
147 elif next_command == self.SUB_MATCH_CLASS:
148 self.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
149 elif next_command == self.SUB_PARSE_CONFIG:
150 self.runCommand(["resetCooker"])
151 elif next_command == self.SUB_GNERATE_TGTS:
152 self.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
153 elif next_command == self.SUB_GENERATE_PKGINFO:
154 self.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
155 elif next_command == self.SUB_SANITY_CHECK:
156 self.runCommand(["triggerEvent", "bb.event.SanityCheck()"])
157 elif next_command == self.SUB_NETWORK_TEST:
158 self.runCommand(["triggerEvent", "bb.event.NetworkTest()"])
159 elif next_command == self.SUB_BUILD_RECIPES:
160 self.clear_busy()
161 self.building = True
162 self.runCommand(["buildTargets", self.recipe_queue, self.default_task])
163 self.recipe_queue = []
164 elif next_command == self.SUB_BUILD_IMAGE:
165 self.clear_busy()
166 self.building = True
167 target = self.image
168
169 if self.base_image:
170 # Request the build of a custom image
171 self.generate_hob_base_image(target)
172 self.set_var_in_file("LINGUAS_INSTALL", "", "local.conf")
173 hobImage = self.runCommand(["matchFile", target + ".bb"])
174 if self.base_image != self.recipe_model.__custom_image__:
175 baseImage = self.runCommand(["matchFile", self.base_image + ".bb"])
176 version = self.runCommand(["generateNewImage", hobImage, baseImage, self.package_queue, True, ""])
177 target += version
178 self.recipe_model.set_custom_image_version(version)
179
180 targets = [target]
181 if self.toolchain_packages:
182 self.set_var_in_file("TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages), "local.conf")
183 targets.append(target + ":do_populate_sdk")
184
185 self.runCommand(["buildTargets", targets, self.default_task])
186
187 def display_error(self):
188 self.clear_busy()
189 self.emit("command-failed", self.error_msg)
190 self.error_msg = ""
191 if self.building:
192 self.building = False
193
194 def handle_event(self, event):
195 if not event:
196 return
197 if self.building:
198 self.current_phase = "building"
199 self.build.handle_event(event)
200
201 if isinstance(event, bb.event.PackageInfo):
202 self.package_model.populate(event._pkginfolist)
203 self.emit("package-populated")
204 self.run_next_command()
205
206 elif isinstance(event, bb.event.SanityCheckPassed):
207 reparse = self.runCommand(["getVariable", "BB_INVALIDCONF"]) or None
208 if reparse is True:
209 self.set_var_in_file("BB_INVALIDCONF", False, "local.conf")
210 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
211                self.commands_async.insert(0, self.SUB_PARSE_CONFIG)
212 self.run_next_command()
213
214 elif isinstance(event, bb.event.SanityCheckFailed):
215 self.emit("sanity-failed", event._msg, event._network_error)
216
217 elif isinstance(event, logging.LogRecord):
218 if not self.building:
219 if event.levelno >= logging.ERROR:
220 formatter = bb.msg.BBLogFormatter()
221 msg = formatter.format(event)
222 self.error_msg += msg + '\n'
223 elif event.levelno >= logging.WARNING and self.parsing == True:
224 formatter = bb.msg.BBLogFormatter()
225 msg = formatter.format(event)
226 warn_msg = msg + '\n'
227 self.emit("parsing-warning", warn_msg)
228
229 elif isinstance(event, bb.event.TargetsTreeGenerated):
230 self.current_phase = "data generation"
231 if event._model:
232 self.recipe_model.populate(event._model)
233 self.emit("recipe-populated")
234 elif isinstance(event, bb.event.ConfigFilesFound):
235 self.current_phase = "configuration lookup"
236 var = event._variable
237 values = event._values
238 values.sort()
239 self.emit("config-updated", var, values)
240 elif isinstance(event, bb.event.ConfigFilePathFound):
241 self.current_phase = "configuration lookup"
242 elif isinstance(event, bb.event.FilesMatchingFound):
243 self.current_phase = "configuration lookup"
244 # FIXME: hard coding, should at least be a variable shared between
245 # here and the caller
246 if event._pattern == "rootfs_":
247 formats = []
248 for match in event._matches:
249 classname, sep, cls = match.rpartition(".")
250 fs, sep, format = classname.rpartition("_")
251 formats.append(format)
252 formats.sort()
253 self.emit("package-formats-updated", formats)
254 elif isinstance(event, bb.command.CommandCompleted):
255 self.current_phase = None
256 self.run_next_command()
257 elif isinstance(event, bb.command.CommandFailed):
258 if event.error not in ("Forced shutdown", "Stopped build"):
259 self.error_msg += event.error
260 self.commands_async = []
261 self.display_error()
262 elif isinstance(event, (bb.event.ParseStarted,
263 bb.event.CacheLoadStarted,
264 bb.event.TreeDataPreparationStarted,
265 )):
266 message = {}
267 message["eventname"] = bb.event.getName(event)
268 message["current"] = 0
269 message["total"] = None
270 message["title"] = "Parsing recipes"
271 self.emit("parsing-started", message)
272 if isinstance(event, bb.event.ParseStarted):
273 self.parsing = True
274 elif isinstance(event, (bb.event.ParseProgress,
275 bb.event.CacheLoadProgress,
276 bb.event.TreeDataPreparationProgress)):
277 message = {}
278 message["eventname"] = bb.event.getName(event)
279 message["current"] = event.current
280 message["total"] = event.total
281 message["title"] = "Parsing recipes"
282 self.emit("parsing", message)
283 elif isinstance(event, (bb.event.ParseCompleted,
284 bb.event.CacheLoadCompleted,
285 bb.event.TreeDataPreparationCompleted)):
286 message = {}
287 message["eventname"] = bb.event.getName(event)
288 message["current"] = event.total
289 message["total"] = event.total
290 message["title"] = "Parsing recipes"
291 self.emit("parsing-completed", message)
292 if isinstance(event, bb.event.ParseCompleted):
293 self.parsing = False
294 elif isinstance(event, bb.event.NetworkTestFailed):
295 self.emit("network-failed")
296 self.run_next_command()
297 elif isinstance(event, bb.event.NetworkTestPassed):
298 self.emit("network-passed")
299 self.run_next_command()
300
301 if self.error_msg and not self.commands_async:
302 self.display_error()
303
304 return
305
306 def init_cooker(self):
307 self.runCommand(["createConfigFile", ".hob.conf"])
308
309 def set_extra_inherit(self, bbclass):
310 self.append_var_in_file("INHERIT", bbclass, ".hob.conf")
311
312 def set_bblayers(self, bblayers):
313 self.set_var_in_file("BBLAYERS", " ".join(bblayers), "bblayers.conf")
314
315 def set_machine(self, machine):
316 if machine:
317 self.early_assign_var_in_file("MACHINE", machine, "local.conf")
318
319 def set_sdk_machine(self, sdk_machine):
320 self.set_var_in_file("SDKMACHINE", sdk_machine, "local.conf")
321
322 def set_image_fstypes(self, image_fstypes):
323 self.set_var_in_file("IMAGE_FSTYPES", image_fstypes, "local.conf")
324
325 def set_distro(self, distro):
326 self.set_var_in_file("DISTRO", distro, "local.conf")
327
328 def set_package_format(self, format):
329 package_classes = ""
330 for pkgfmt in format.split():
331 package_classes += ("package_%s" % pkgfmt + " ")
332 self.set_var_in_file("PACKAGE_CLASSES", package_classes, "local.conf")
333
334 def set_bbthreads(self, threads):
335 self.set_var_in_file("BB_NUMBER_THREADS", threads, "local.conf")
336
337 def set_pmake(self, threads):
338 pmake = "-j %s" % threads
339 self.set_var_in_file("PARALLEL_MAKE", pmake, "local.conf")
340
341 def set_dl_dir(self, directory):
342 self.set_var_in_file("DL_DIR", directory, "local.conf")
343
344 def set_sstate_dir(self, directory):
345 self.set_var_in_file("SSTATE_DIR", directory, "local.conf")
346
347 def set_sstate_mirrors(self, url):
348 self.set_var_in_file("SSTATE_MIRRORS", url, "local.conf")
349
350 def set_extra_size(self, image_extra_size):
351 self.set_var_in_file("IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size), "local.conf")
352
353 def set_rootfs_size(self, image_rootfs_size):
354 self.set_var_in_file("IMAGE_ROOTFS_SIZE", str(image_rootfs_size), "local.conf")
355
356 def set_incompatible_license(self, incompat_license):
357 self.set_var_in_file("INCOMPATIBLE_LICENSE", incompat_license, "local.conf")
358
359 def set_extra_setting(self, extra_setting):
360 self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")
361
362 def set_extra_config(self, extra_setting):
363 old_extra_setting = self.runCommand(["getVariable", "EXTRA_SETTING"]) or {}
364 old_extra_setting = str(old_extra_setting)
365
366 old_extra_setting = ast.literal_eval(old_extra_setting)
367        if not isinstance(old_extra_setting, dict):
368 old_extra_setting = {}
369
370 # settings not changed
371 if old_extra_setting == extra_setting:
372 return
373
374 # remove the old EXTRA SETTING variable
375 self.remove_var_from_file("EXTRA_SETTING")
376
377 # remove old settings from conf
378 for key in old_extra_setting.keys():
379 if key not in extra_setting:
380 self.remove_var_from_file(key)
381
382 # add new settings
383 for key, value in extra_setting.iteritems():
384 self.set_var_in_file(key, value, "local.conf")
385
386 if extra_setting:
387 self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")
388
389 def set_http_proxy(self, http_proxy):
390 self.set_var_in_file("http_proxy", http_proxy, "local.conf")
391
392 def set_https_proxy(self, https_proxy):
393 self.set_var_in_file("https_proxy", https_proxy, "local.conf")
394
395 def set_ftp_proxy(self, ftp_proxy):
396 self.set_var_in_file("ftp_proxy", ftp_proxy, "local.conf")
397
398 def set_socks_proxy(self, socks_proxy):
399 self.set_var_in_file("all_proxy", socks_proxy, "local.conf")
400
401 def set_cvs_proxy(self, host, port):
402 self.set_var_in_file("CVS_PROXY_HOST", host, "local.conf")
403 self.set_var_in_file("CVS_PROXY_PORT", port, "local.conf")
404
405 def request_package_info(self):
406 self.commands_async.append(self.SUB_GENERATE_PKGINFO)
407 self.run_next_command(self.POPULATE_PACKAGEINFO)
408
409 def trigger_sanity_check(self):
410 self.commands_async.append(self.SUB_SANITY_CHECK)
411 self.run_next_command(self.SANITY_CHECK)
412
413 def trigger_network_test(self):
414 self.commands_async.append(self.SUB_NETWORK_TEST)
415 self.run_next_command(self.NETWORK_TEST)
416
417 def generate_configuration(self):
418 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
419 self.commands_async.append(self.SUB_PARSE_CONFIG)
420 self.commands_async.append(self.SUB_PATH_LAYERS)
421 self.commands_async.append(self.SUB_FILES_DISTRO)
422 self.commands_async.append(self.SUB_FILES_MACH)
423 self.commands_async.append(self.SUB_FILES_SDKMACH)
424 self.commands_async.append(self.SUB_MATCH_CLASS)
425 self.run_next_command(self.GENERATE_CONFIGURATION)
426
427 def generate_recipes(self):
428 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
429 self.commands_async.append(self.SUB_PARSE_CONFIG)
430 self.commands_async.append(self.SUB_GNERATE_TGTS)
431 self.run_next_command(self.GENERATE_RECIPES)
432
433 def generate_packages(self, tgts, default_task="build"):
434 targets = []
435 targets.extend(tgts)
436 self.recipe_queue = targets
437 self.default_task = default_task
438 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
439 self.commands_async.append(self.SUB_PARSE_CONFIG)
440 self.commands_async.append(self.SUB_BUILD_RECIPES)
441 self.run_next_command(self.GENERATE_PACKAGES)
442
443 def generate_image(self, image, base_image, image_packages=[], toolchain_packages=[], default_task="build"):
444 self.image = image
445 self.base_image = base_image
446 self.package_queue = image_packages
447 self.toolchain_packages = toolchain_packages
448 self.default_task = default_task
449 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
450 self.commands_async.append(self.SUB_PARSE_CONFIG)
451 self.commands_async.append(self.SUB_BUILD_IMAGE)
452 self.run_next_command(self.GENERATE_IMAGE)
453
454 def generate_new_image(self, image, base_image, package_queue, description):
455 if base_image:
456 base_image = self.runCommand(["matchFile", self.base_image + ".bb"])
457 self.runCommand(["generateNewImage", image, base_image, package_queue, False, description])
458
459 def generate_hob_base_image(self, hob_image):
460 image_dir = self.get_topdir() + "/recipes/images/"
461 recipe_name = hob_image + ".bb"
462 self.ensure_dir(image_dir)
463 self.generate_new_image(image_dir + recipe_name, None, [], "")
464
465 def ensure_dir(self, directory):
466 self.runCommand(["ensureDir", directory])
467
468 def build_succeeded_async(self):
469 self.building = False
470
471 def build_failed_async(self):
472 self.initcmd = None
473 self.commands_async = []
474 self.building = False
475
476 def cancel_parse(self):
477 self.runCommand(["stateForceShutdown"])
478
479 def cancel_build(self, force=False):
480 if force:
481 # Force the cooker to stop as quickly as possible
482 self.runCommand(["stateForceShutdown"])
483 else:
484            # Wait for running tasks to complete before shutting down; this
485            # helps leave the work directory in a usable state
486 self.runCommand(["stateShutdown"])
487
488 def reset_build(self):
489 self.build.reset()
490
491 def get_logfile(self):
492 return self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
493
494 def get_topdir(self):
495 return self.runCommand(["getVariable", "TOPDIR"]) or ""
496
497 def _remove_redundant(self, string):
498 ret = []
499 for i in string.split():
500 if i not in ret:
501 ret.append(i)
502 return " ".join(ret)
503
504 def set_var_in_file(self, var, val, default_file=None):
505 self.runCommand(["enableDataTracking"])
506 self.server.runCommand(["setVarFile", var, val, default_file, "set"])
507 self.runCommand(["disableDataTracking"])
508
509 def early_assign_var_in_file(self, var, val, default_file=None):
510 self.runCommand(["enableDataTracking"])
511 self.server.runCommand(["setVarFile", var, val, default_file, "earlyAssign"])
512 self.runCommand(["disableDataTracking"])
513
514 def remove_var_from_file(self, var):
515 self.server.runCommand(["removeVarFile", var])
516
517 def append_var_in_file(self, var, val, default_file=None):
518 self.server.runCommand(["setVarFile", var, val, default_file, "append"])
519
520 def append_to_bbfiles(self, val):
521 bbfiles = self.runCommand(["getVariable", "BBFILES", "False"]) or ""
522 bbfiles = bbfiles.split()
523 if val not in bbfiles:
524 self.append_var_in_file("BBFILES", val, "bblayers.conf")
525
526 def get_parameters(self):
527 # retrieve the parameters from bitbake
528 params = {}
529 params["core_base"] = self.runCommand(["getVariable", "COREBASE"]) or ""
530 params["layer"] = self.runCommand(["getVariable", "BBLAYERS"]) or ""
531 params["layers_non_removable"] = self.runCommand(["getVariable", "BBLAYERS_NON_REMOVABLE"]) or ""
532 params["dldir"] = self.runCommand(["getVariable", "DL_DIR"]) or ""
533 params["machine"] = self.runCommand(["getVariable", "MACHINE"]) or ""
534 params["distro"] = self.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
535 params["pclass"] = self.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
536 params["sstatedir"] = self.runCommand(["getVariable", "SSTATE_DIR"]) or ""
537 params["sstatemirror"] = self.runCommand(["getVariable", "SSTATE_MIRRORS"]) or ""
538
539 num_threads = self.runCommand(["getCpuCount"])
540 if not num_threads:
541 num_threads = 1
542 max_threads = 65536
543 else:
544 try:
545 num_threads = int(num_threads)
546 max_threads = 16 * num_threads
547 except:
548 num_threads = 1
549 max_threads = 65536
550 params["max_threads"] = max_threads
551
552 bbthread = self.runCommand(["getVariable", "BB_NUMBER_THREADS"])
553 if not bbthread:
554 bbthread = num_threads
555 else:
556 try:
557 bbthread = int(bbthread)
558 except:
559 bbthread = num_threads
560 params["bbthread"] = bbthread
561
562 pmake = self.runCommand(["getVariable", "PARALLEL_MAKE"])
563 if not pmake:
564 pmake = num_threads
565 elif isinstance(pmake, int):
566 pass
567 else:
568 try:
569 pmake = int(pmake.lstrip("-j "))
570 except:
571 pmake = num_threads
572 params["pmake"] = "-j %s" % pmake
573
574 params["image_addr"] = self.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""
575
576 image_extra_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
577 if not image_extra_size:
578 image_extra_size = 0
579 else:
580 try:
581 image_extra_size = int(image_extra_size)
582 except:
583 image_extra_size = 0
584 params["image_extra_size"] = image_extra_size
585
586 image_rootfs_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
587 if not image_rootfs_size:
588 image_rootfs_size = 0
589 else:
590 try:
591 image_rootfs_size = int(image_rootfs_size)
592 except:
593 image_rootfs_size = 0
594 params["image_rootfs_size"] = image_rootfs_size
595
596 image_overhead_factor = self.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
597 if not image_overhead_factor:
598 image_overhead_factor = 1
599 else:
600 try:
601 image_overhead_factor = float(image_overhead_factor)
602 except:
603 image_overhead_factor = 1
604 params['image_overhead_factor'] = image_overhead_factor
605
606 params["incompat_license"] = self._remove_redundant(self.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
607 params["sdk_machine"] = self.runCommand(["getVariable", "SDKMACHINE"]) or self.runCommand(["getVariable", "SDK_ARCH"]) or ""
608
609 params["image_fstypes"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")
610
611 params["image_types"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_TYPES"]) or "")
612
613 params["conf_version"] = self.runCommand(["getVariable", "CONF_VERSION"]) or ""
614 params["lconf_version"] = self.runCommand(["getVariable", "LCONF_VERSION"]) or ""
615
616 params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
617 params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
618 params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
619 params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
620 params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
621 params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
622 params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
623 params["target_arch"] = self.runCommand(["getVariable", "TARGET_ARCH"]) or ""
624 params["tune_pkgarch"] = self.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
625 params["bb_version"] = self.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""
626
627 params["default_task"] = self.runCommand(["getVariable", "BB_DEFAULT_TASK"]) or "build"
628
629 params["socks_proxy"] = self.runCommand(["getVariable", "all_proxy"]) or ""
630 params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
631 params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
632 params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""
633
634 params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
635 params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""
636
637 params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
638 params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
639 return params
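
The handler above serialises its work through commands_async: callers append
SUB_* steps, run_next_command() issues a single BitBake command, and the next
step is only popped when a bb.command.CommandCompleted event comes back through
handle_event(). A reduced sketch of that queueing pattern, with a plain
callable standing in for the BitBake server (all names here are illustrative):

    class CommandQueue(object):
        """Minimal stand-in for HobHandler's commands_async handling."""

        def __init__(self, runner):
            self.runner = runner      # callable that executes a single step
            self.pending = []
            self.busy = False

        def enqueue(self, *steps):
            self.pending.extend(steps)

        def run_next(self):
            # called once to start, then again each time a step completes
            if not self.pending:
                self.busy = False
                return
            self.busy = True
            self.runner(self.pending.pop(0))

    if __name__ == "__main__":
        queue = CommandQueue(lambda step: None)
        queue.enqueue("parse-config", "generate-targets")
        while queue.pending:
            queue.run_next()   # in Hob this is driven by CommandCompleted events
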
diff --git a/bitbake/lib/bb/ui/crumbs/hoblistmodel.py b/bitbake/lib/bb/ui/crumbs/hoblistmodel.py
new file mode 100644
index 0000000000..50df156f4d
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hoblistmodel.py
@@ -0,0 +1,903 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25from bb.ui.crumbs.hobpages import HobPage
26
27#
28# PackageListModel
29#
30class PackageListModel(gtk.ListStore):
31 """
32    This class defines a gtk.ListStore subclass which is populated from the
33    package data carried by a bb.event.PackageInfo event, whilst also
34    providing convenience functions to access gtk.TreeModel subclasses which
35    provide filtered views of the data.
36 """
37
38 (COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_RCP, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT, COL_FLIST) = range(15)
39
40 __gsignals__ = {
41 "package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
42 gobject.TYPE_NONE,
43 ()),
44 }
45
46 __toolchain_required_packages__ = ["packagegroup-core-standalone-sdk-target", "packagegroup-core-standalone-sdk-target-dbg"]
47
48 def __init__(self):
49 self.rprov_pkg = {}
50 gtk.ListStore.__init__ (self,
51 gobject.TYPE_STRING,
52 gobject.TYPE_STRING,
53 gobject.TYPE_STRING,
54 gobject.TYPE_STRING,
55 gobject.TYPE_STRING,
56 gobject.TYPE_STRING,
57 gobject.TYPE_STRING,
58 gobject.TYPE_STRING,
59 gobject.TYPE_STRING,
60 gobject.TYPE_STRING,
61 gobject.TYPE_STRING,
62 gobject.TYPE_BOOLEAN,
63 gobject.TYPE_BOOLEAN,
64 gobject.TYPE_STRING,
65 gobject.TYPE_STRING)
66 self.sort_column_id, self.sort_order = PackageListModel.COL_NAME, gtk.SORT_ASCENDING
67
68 """
69 Find the model path for the item_name
70 Returns the path in the model or None
71 """
72 def find_path_for_item(self, item_name):
73 pkg = item_name
74 if item_name not in self.pn_path.keys():
75 if item_name not in self.rprov_pkg.keys():
76 return None
77 pkg = self.rprov_pkg[item_name]
78 if pkg not in self.pn_path.keys():
79 return None
80
81 return self.pn_path[pkg]
82
83 def find_item_for_path(self, item_path):
84 return self[item_path][self.COL_NAME]
85
86 """
87    Helper function to determine whether an item matches the specified filter
88 """
89 def tree_model_filter(self, model, it, filter):
90 name = model.get_value(it, self.COL_NAME)
91
92 for key in filter.keys():
93 if key == self.COL_NAME:
94 if filter[key] != 'Search packages by name':
95 if name and filter[key] not in name:
96 return False
97 else:
98 if model.get_value(it, key) not in filter[key]:
99 return False
100 self.filtered_nb += 1
101 return True
102
103 """
104 Create, if required, and return a filtered gtk.TreeModelSort
105 containing only the items specified by filter
106 """
107 def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
108 model = self.filter_new()
109 self.filtered_nb = 0
110 model.set_visible_func(self.tree_model_filter, filter)
111
112 sort = gtk.TreeModelSort(model)
113 sort.connect ('sort-column-changed', self.sort_column_changed_cb)
114 if initial:
115 sort.set_sort_column_id(PackageListModel.COL_NAME, gtk.SORT_ASCENDING)
116 sort.set_default_sort_func(None)
117 elif excluded_items_ahead:
118 sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
119 elif included_items_ahead:
120 sort.set_default_sort_func(self.include_item_sort_func, search_data)
121 else:
122 if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
123 sort.set_default_sort_func(self.sort_func, search_data)
124 else:
125 sort.set_sort_column_id(self.sort_column_id, self.sort_order)
126 sort.set_default_sort_func(None)
127
128 sort.set_sort_func(PackageListModel.COL_INC, self.sort_column, PackageListModel.COL_INC)
129 sort.set_sort_func(PackageListModel.COL_SIZE, self.sort_column, PackageListModel.COL_SIZE)
130 sort.set_sort_func(PackageListModel.COL_BINB, self.sort_binb_column)
131 sort.set_sort_func(PackageListModel.COL_RCP, self.sort_column, PackageListModel.COL_RCP)
132 return sort
133
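    # A hypothetical call from a view, filtering to the included packages and
    # using the default name sort:
    #   filter = {PackageListModel.COL_INC: [True]}
    #   treeview.set_model(package_model.tree_model(filter, initial=True))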
134 def sort_column_changed_cb (self, data):
135 self.sort_column_id, self.sort_order = data.get_sort_column_id ()
136
137 def sort_column(self, model, row1, row2, col):
138 value1 = model.get_value(row1, col)
139 value2 = model.get_value(row2, col)
140 if col==PackageListModel.COL_SIZE:
141 value1 = HobPage._string_to_size(value1)
142 value2 = HobPage._string_to_size(value2)
143
144 cmp_res = cmp(value1, value2)
145 if cmp_res!=0:
146 if col==PackageListModel.COL_INC:
147 return -cmp_res
148 else:
149 return cmp_res
150 else:
151 name1 = model.get_value(row1, PackageListModel.COL_NAME)
152 name2 = model.get_value(row2, PackageListModel.COL_NAME)
153 return cmp(name1,name2)
154
155 def sort_binb_column(self, model, row1, row2):
156 value1 = model.get_value(row1, PackageListModel.COL_BINB)
157 value2 = model.get_value(row2, PackageListModel.COL_BINB)
158 value1_list = value1.split(', ')
159 value2_list = value2.split(', ')
160
161 value1 = value1_list[0]
162 value2 = value2_list[0]
163
164 cmp_res = cmp(value1, value2)
165 if cmp_res==0:
166 cmp_size = cmp(len(value1_list), len(value2_list))
167 if cmp_size==0:
168 name1 = model.get_value(row1, PackageListModel.COL_NAME)
169 name2 = model.get_value(row2, PackageListModel.COL_NAME)
170 return cmp(name1,name2)
171 else:
172 return cmp_size
173 else:
174 return cmp_res
175
176 def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
177 if user_data:
178 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
179 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
180 return self.cmp_vals(val1, val2, user_data)
181 else:
182 val1 = model.get_value(iter1, PackageListModel.COL_FADE_INC)
183 val2 = model.get_value(iter2, PackageListModel.COL_INC)
184 return ((val1 == True) and (val2 == False))
185
186 def include_item_sort_func(self, model, iter1, iter2, user_data=None):
187 if user_data:
188 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
189 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
190 return self.cmp_vals(val1, val2, user_data)
191 else:
192 val1 = model.get_value(iter1, PackageListModel.COL_INC)
193 val2 = model.get_value(iter2, PackageListModel.COL_INC)
194 return ((val1 == False) and (val2 == True))
195
196 def sort_func(self, model, iter1, iter2, user_data):
197 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
198 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
199 return self.cmp_vals(val1, val2, user_data)
200
201 def cmp_vals(self, val1, val2, user_data):
202 if val1 is None or val2 is None:
203 return 0
204 elif val1.startswith(user_data) and not val2.startswith(user_data):
205 return -1
206 elif not val1.startswith(user_data) and val2.startswith(user_data):
207 return 1
208 else:
209 return cmp(val1, val2)
210
211 def convert_vpath_to_path(self, view_model, view_path):
212 # view_model is the model sorted
213 # get the path of the model filtered
214 filtered_model_path = view_model.convert_path_to_child_path(view_path)
215 # get the model filtered
216 filtered_model = view_model.get_model()
217 # get the path of the original model
218 path = filtered_model.convert_path_to_child_path(filtered_model_path)
219 return path
220
221 def convert_path_to_vpath(self, view_model, path):
222 it = view_model.get_iter_first()
223 while it:
224 name = self.find_item_for_path(path)
225 view_name = view_model.get_value(it, PackageListModel.COL_NAME)
226 if view_name == name:
227 view_path = view_model.get_path(it)
228 return view_path
229 it = view_model.iter_next(it)
230 return None
231
232 """
233 The populate() function takes as input the data from a
234 bb.event.PackageInfo event and populates the package list.
235 """
236 def populate(self, pkginfolist):
237 # First clear the model, in case repopulating
238 self.clear()
239
240 def getpkgvalue(pkgdict, key, pkgname, defaultval = None):
241 value = pkgdict.get('%s_%s' % (key, pkgname), None)
242 if not value:
243 value = pkgdict.get(key, defaultval)
244 return value
245
246 for pkginfo in pkginfolist:
247 pn = pkginfo['PN']
248 pv = pkginfo['PV']
249 pr = pkginfo['PR']
250 pkg = pkginfo['PKG']
251 pkgv = getpkgvalue(pkginfo, 'PKGV', pkg)
252 pkgr = getpkgvalue(pkginfo, 'PKGR', pkg)
253            # PKGSIZE is artificial and is always stored with the package name suffix (PKGSIZE_<pkg>)
254 pkgsize = int(pkginfo.get('PKGSIZE_%s' % pkg, "0"))
255 # PKG_%s is the renamed version
256 pkg_rename = pkginfo.get('PKG_%s' % pkg, "")
257            # The remaining fields may or may not have package-specific overrides
258 section = getpkgvalue(pkginfo, 'SECTION', pkg, "")
259 summary = getpkgvalue(pkginfo, 'SUMMARY', pkg, "")
260 rdep = getpkgvalue(pkginfo, 'RDEPENDS', pkg, "")
261 rrec = getpkgvalue(pkginfo, 'RRECOMMENDS', pkg, "")
262 rprov = getpkgvalue(pkginfo, 'RPROVIDES', pkg, "")
263 files_list = getpkgvalue(pkginfo, 'FILES_INFO', pkg, "")
264 for i in rprov.split():
265 self.rprov_pkg[i] = pkg
266
267 recipe = pn + '-' + pv + '-' + pr
268
269 allow_empty = getpkgvalue(pkginfo, 'ALLOW_EMPTY', pkg, "")
270
271 if pkgsize == 0 and not allow_empty:
272 continue
273
274 size = HobPage._size_to_string(pkgsize)
275 self.set(self.append(), self.COL_NAME, pkg, self.COL_VER, pkgv,
276 self.COL_REV, pkgr, self.COL_RNM, pkg_rename,
277 self.COL_SEC, section, self.COL_SUM, summary,
278 self.COL_RDEP, rdep + ' ' + rrec,
279 self.COL_RPROV, rprov, self.COL_SIZE, size,
280 self.COL_RCP, recipe, self.COL_BINB, "",
281 self.COL_INC, False, self.COL_FONT, '10', self.COL_FLIST, files_list)
282
283 self.pn_path = {}
284 it = self.get_iter_first()
285 while it:
286 pn = self.get_value(it, self.COL_NAME)
287 path = self.get_path(it)
288 self.pn_path[pn] = path
289 it = self.iter_next(it)
290
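    # Illustrative populate() input (hypothetical values; a real list comes
    # from the PackageInfo event):
    #   model = PackageListModel()
    #   model.populate([{
    #       'PN': 'busybox', 'PV': '1.20.2', 'PR': 'r0', 'PKG': 'busybox',
    #       'PKGSIZE_busybox': '524288', 'SECTION': 'base',
    #       'SUMMARY': 'Tiny versions of many common UNIX utilities',
    #       'RDEPENDS': '', 'RRECOMMENDS': '', 'RPROVIDES': '',
    #       'FILES_INFO': '', 'ALLOW_EMPTY': '',
    #   }])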
291 """
292 Update the model, send out the notification.
293 """
294 def selection_change_notification(self):
295 self.emit("package-selection-changed")
296
297 """
298 Check whether the item at item_path is included or not
299 """
300 def path_included(self, item_path):
301 return self[item_path][self.COL_INC]
302
303 """
304 Add this item, and any of its dependencies, to the image contents
305 """
306 def include_item(self, item_path, binb=""):
307 if self.path_included(item_path):
308 return
309
310 item_name = self[item_path][self.COL_NAME]
311 item_deps = self[item_path][self.COL_RDEP]
312
313 self[item_path][self.COL_INC] = True
314
315 item_bin = self[item_path][self.COL_BINB].split(', ')
316 if binb and not binb in item_bin:
317 item_bin.append(binb)
318 self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
319
320 if item_deps:
321            # Ensure all of the item's deps are included and, where appropriate,
322 # add this item to their COL_BINB
323 for dep in item_deps.split(" "):
324 if dep.startswith('('):
325 continue
326 # If the contents model doesn't already contain dep, add it
327 dep_path = self.find_path_for_item(dep)
328 if not dep_path:
329 continue
330 dep_included = self.path_included(dep_path)
331
332 if dep_included and not dep in item_bin:
333 # don't set the COL_BINB to this item if the target is an
334 # item in our own COL_BINB
335 dep_bin = self[dep_path][self.COL_BINB].split(', ')
336 if not item_name in dep_bin:
337 dep_bin.append(item_name)
338 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
339 elif not dep_included:
340 self.include_item(dep_path, binb=item_name)
341
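A caller-side sketch of the recursion above, using hypothetical package names: including one package also includes any of its runtime dependencies already present in the model and records which item brought each of them in.

    # `model` is a populated PackageListModel; 'dropbear' is an invented example
    path = model.find_path_for_item('dropbear')
    if path:
        model.include_item(path, binb="User Selected")
        # 'dropbear' now has COL_INC set and COL_BINB == "User Selected"; each of
        # its RDEPENDS found in the model is included too, with COL_BINB == "dropbear"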
342 def exclude_item(self, item_path):
343 if not self.path_included(item_path):
344 return
345
346 self[item_path][self.COL_INC] = False
347
348 item_name = self[item_path][self.COL_NAME]
349 item_deps = self[item_path][self.COL_RDEP]
350 if item_deps:
351 for dep in item_deps.split(" "):
352 if dep.startswith('('):
353 continue
354 dep_path = self.find_path_for_item(dep)
355 if not dep_path:
356 continue
357 dep_bin = self[dep_path][self.COL_BINB].split(', ')
358 if item_name in dep_bin:
359 dep_bin.remove(item_name)
360 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
361
362 item_bin = self[item_path][self.COL_BINB].split(', ')
363 if item_bin:
364 for binb in item_bin:
365 binb_path = self.find_path_for_item(binb)
366 if not binb_path:
367 continue
368 self.exclude_item(binb_path)
369
370 """
371    Reset each entry by clearing its include flag (COL_INC) and its 'brought in by' column (COL_BINB)
372 """
373 def reset(self):
374 it = self.get_iter_first()
375 while it:
376 self.set(it,
377 self.COL_INC, False,
378 self.COL_BINB, "")
379 it = self.iter_next(it)
380
381 self.selection_change_notification()
382
383 def get_selected_packages(self):
384 packagelist = []
385
386 it = self.get_iter_first()
387 while it:
388 if self.get_value(it, self.COL_INC):
389 name = self.get_value(it, self.COL_NAME)
390 packagelist.append(name)
391 it = self.iter_next(it)
392
393 return packagelist
394
395 def get_user_selected_packages(self):
396 packagelist = []
397
398 it = self.get_iter_first()
399 while it:
400 if self.get_value(it, self.COL_INC):
401 binb = self.get_value(it, self.COL_BINB)
402 if binb == "User Selected":
403 name = self.get_value(it, self.COL_NAME)
404 packagelist.append(name)
405 it = self.iter_next(it)
406
407 return packagelist
408
409 def get_selected_packages_toolchain(self):
410 packagelist = []
411
412 it = self.get_iter_first()
413 while it:
414 if self.get_value(it, self.COL_INC):
415 name = self.get_value(it, self.COL_NAME)
416 if name.endswith("-dev") or name.endswith("-dbg"):
417 packagelist.append(name)
418 it = self.iter_next(it)
419
420        return list(set(packagelist + self.__toolchain_required_packages__))
421
422 """
423    The package model may be incomplete, so when set_selected_packages() is
424    called some of the requested packages may not be found and cannot be marked
425    as included. Return the list of packages that were not set.
426 """
427 def set_selected_packages(self, packagelist, user_selected=False):
428 left = []
429 binb = 'User Selected' if user_selected else ''
430 for pn in packagelist:
431 if pn in self.pn_path.keys():
432 path = self.pn_path[pn]
433 self.include_item(item_path=path, binb=binb)
434 else:
435 left.append(pn)
436
437 self.selection_change_notification()
438 return left
439
440 """
441    Return the total size of the selected packages, in bytes.
442 """
443 def get_packages_size(self):
444 packages_size = 0
445 it = self.get_iter_first()
446 while it:
447 if self.get_value(it, self.COL_INC):
448 str_size = self.get_value(it, self.COL_SIZE)
449                # skip an empty size string without bypassing the iter_next() below
450                if str_size:
451                    packages_size += HobPage._string_to_size(str_size)
452
453
454 it = self.iter_next(it)
455 return packages_size
456
457 """
458    Resync the included state of each item to the backup column (COL_FADE_INC) before performing the fade-out visual effect
459 """
460 def resync_fadeout_column(self, model_first_iter=None):
461 it = model_first_iter
462 while it:
463 active = self.get_value(it, self.COL_INC)
464 self.set(it, self.COL_FADE_INC, active)
465 it = self.iter_next(it)
466
467#
468# RecipeListModel
469#
470class RecipeListModel(gtk.ListStore):
471 """
472    This class defines a gtk.ListStore subclass which will convert the output
473 of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
474 providing convenience functions to access gtk.TreeModel subclasses which
475 provide filtered views of the data.
476 """
477 (COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC, COL_SUMMARY, COL_VERSION,
478 COL_REVISION, COL_HOMEPAGE, COL_BUGTRACKER, COL_FILE) = range(18)
479
480 __custom_image__ = "Start with an empty image recipe"
481
482 __gsignals__ = {
483 "recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
484 gobject.TYPE_NONE,
485 ()),
486 }
487
488 """
489 """
490 def __init__(self):
491 gtk.ListStore.__init__ (self,
492 gobject.TYPE_STRING,
493 gobject.TYPE_STRING,
494 gobject.TYPE_STRING,
495 gobject.TYPE_STRING,
496 gobject.TYPE_STRING,
497 gobject.TYPE_STRING,
498 gobject.TYPE_STRING,
499 gobject.TYPE_BOOLEAN,
500 gobject.TYPE_BOOLEAN,
501 gobject.TYPE_STRING,
502 gobject.TYPE_STRING,
503 gobject.TYPE_BOOLEAN,
504 gobject.TYPE_STRING,
505 gobject.TYPE_STRING,
506 gobject.TYPE_STRING,
507 gobject.TYPE_STRING,
508 gobject.TYPE_STRING,
509 gobject.TYPE_STRING)
510 self.sort_column_id, self.sort_order = RecipeListModel.COL_NAME, gtk.SORT_ASCENDING
511
512 """
513 Find the model path for the item_name
514 Returns the path in the model or None
515 """
516 def find_path_for_item(self, item_name):
517 if self.non_target_name(item_name) or item_name not in self.pn_path.keys():
518 return None
519 else:
520 return self.pn_path[item_name]
521
522 def find_item_for_path(self, item_path):
523 return self[item_path][self.COL_NAME]
524
525 """
526    Helper method to determine whether name is a non-target pn (e.g. a -native recipe)
527 """
528 def non_target_name(self, name):
529 if name and ('-native' in name):
530 return True
531 return False
532
533 """
534    Helper function to determine whether an item matches the given filter
535 """
536 def tree_model_filter(self, model, it, filter):
537 name = model.get_value(it, self.COL_NAME)
538 if self.non_target_name(name):
539 return False
540
541 for key in filter.keys():
542 if key == self.COL_NAME:
543 if filter[key] != 'Search recipes by name' and filter[key] != 'Search package groups by name':
544 if filter[key] not in name:
545 return False
546 else:
547 if model.get_value(it, key) not in filter[key]:
548 return False
549 self.filtered_nb += 1
550
551 return True
552
553 def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
554 if user_data:
555 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
556 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
557 return self.cmp_vals(val1, val2, user_data)
558 else:
559 val1 = model.get_value(iter1, RecipeListModel.COL_FADE_INC)
560 val2 = model.get_value(iter2, RecipeListModel.COL_INC)
561 return ((val1 == True) and (val2 == False))
562
563 def include_item_sort_func(self, model, iter1, iter2, user_data=None):
564 if user_data:
565 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
566 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
567 return self.cmp_vals(val1, val2, user_data)
568 else:
569 val1 = model.get_value(iter1, RecipeListModel.COL_INC)
570 val2 = model.get_value(iter2, RecipeListModel.COL_INC)
571 return ((val1 == False) and (val2 == True))
572
573 def sort_func(self, model, iter1, iter2, user_data):
574 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
575 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
576 return self.cmp_vals(val1, val2, user_data)
577
578 def cmp_vals(self, val1, val2, user_data):
579 if val1 is None or val2 is None:
580 return 0
581 elif val1.startswith(user_data) and not val2.startswith(user_data):
582 return -1
583 elif not val1.startswith(user_data) and val2.startswith(user_data):
584 return 1
585 else:
586 return cmp(val1, val2)
587
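For example, with a hypothetical search string 'gtk', the comparator above sorts names matching the search prefix ahead of everything else and only falls back to plain lexical order when both (or neither) match.

    m = RecipeListModel()
    print m.cmp_vals('gtk+', 'zlib', 'gtk')    # -1: only the first name matches the prefix
    print m.cmp_vals('zlib', 'gtk+', 'gtk')    #  1: only the second name matches
    print m.cmp_vals('gtk+', 'gtkmm', 'gtk')   # cmp('gtk+', 'gtkmm'): both match, lexical order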
588 """
589    Create and return a sorted gtk.TreeModelSort wrapping a filtered view that
590    contains only the items matching filter
591 """
592 def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
593 model = self.filter_new()
594 self.filtered_nb = 0
595 model.set_visible_func(self.tree_model_filter, filter)
596
597 sort = gtk.TreeModelSort(model)
598 sort.connect ('sort-column-changed', self.sort_column_changed_cb)
599 if initial:
600 sort.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)
601 sort.set_default_sort_func(None)
602 elif excluded_items_ahead:
603 sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
604 elif included_items_ahead:
605 sort.set_default_sort_func(self.include_item_sort_func, search_data)
606 else:
607 if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
608 sort.set_default_sort_func(self.sort_func, search_data)
609 else:
610 sort.set_sort_column_id(self.sort_column_id, self.sort_order)
611 sort.set_default_sort_func(None)
612
613 sort.set_sort_func(RecipeListModel.COL_INC, self.sort_column, RecipeListModel.COL_INC)
614 sort.set_sort_func(RecipeListModel.COL_GROUP, self.sort_column, RecipeListModel.COL_GROUP)
615 sort.set_sort_func(RecipeListModel.COL_BINB, self.sort_binb_column)
616 sort.set_sort_func(RecipeListModel.COL_LIC, self.sort_column, RecipeListModel.COL_LIC)
617 return sort
618
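A hypothetical caller-side sketch (the filter contents are invented for illustration): COL_NAME entries are treated as substring search text unless they equal one of the placeholder strings, while other column keys are matched against a list of allowed values.

    model = RecipeListModel()
    filter_dict = {RecipeListModel.COL_TYPE: ['recipe', 'packagegroup'],
                   RecipeListModel.COL_NAME: 'gtk'}
    sorted_view = model.tree_model(filter_dict)
    # a gtk.TreeView would then display the filtered, sorted rows:
    # treeview.set_model(sorted_view)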
619 def sort_column_changed_cb (self, data):
620 self.sort_column_id, self.sort_order = data.get_sort_column_id ()
621
622 def sort_column(self, model, row1, row2, col):
623 value1 = model.get_value(row1, col)
624 value2 = model.get_value(row2, col)
625 cmp_res = cmp(value1, value2)
626 if cmp_res!=0:
627 if col==RecipeListModel.COL_INC:
628 return -cmp_res
629 else:
630 return cmp_res
631 else:
632 name1 = model.get_value(row1, RecipeListModel.COL_NAME)
633 name2 = model.get_value(row2, RecipeListModel.COL_NAME)
634 return cmp(name1,name2)
635
636 def sort_binb_column(self, model, row1, row2):
637 value1 = model.get_value(row1, RecipeListModel.COL_BINB)
638 value2 = model.get_value(row2, RecipeListModel.COL_BINB)
639 value1_list = value1.split(', ')
640 value2_list = value2.split(', ')
641
642 value1 = value1_list[0]
643 value2 = value2_list[0]
644
645 cmp_res = cmp(value1, value2)
646 if cmp_res==0:
647 cmp_size = cmp(len(value1_list), len(value2_list))
648 if cmp_size==0:
649 name1 = model.get_value(row1, RecipeListModel.COL_NAME)
650 name2 = model.get_value(row2, RecipeListModel.COL_NAME)
651 return cmp(name1,name2)
652 else:
653 return cmp_size
654 else:
655 return cmp_res
656
657 def convert_vpath_to_path(self, view_model, view_path):
658 filtered_model_path = view_model.convert_path_to_child_path(view_path)
659 filtered_model = view_model.get_model()
660
661 # get the path of the original model
662 path = filtered_model.convert_path_to_child_path(filtered_model_path)
663 return path
664
665 def convert_path_to_vpath(self, view_model, path):
666 it = view_model.get_iter_first()
667 while it:
668 name = self.find_item_for_path(path)
669 view_name = view_model.get_value(it, RecipeListModel.COL_NAME)
670 if view_name == name:
671 view_path = view_model.get_path(it)
672 return view_path
673 it = view_model.iter_next(it)
674 return None
675
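The two helpers above assume the stack built by tree_model(): a gtk.TreeModelSort over a gtk.TreeModelFilter over this ListStore, so a row picked in the view maps back to the underlying store in two hops (variable names below are hypothetical).

    # `sorted_view` was returned by tree_model(); `view_path` comes from the
    # gtk.TreeView selection shown to the user
    base_path = model.convert_vpath_to_path(sorted_view, view_path)
    recipe_name = model.find_item_for_path(base_path)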
676 """
677 The populate() function takes as input the data from a
678 bb.event.TargetsTreeGenerated event and populates the RecipeList.
679 """
680 def populate(self, event_model):
681 # First clear the model, in case repopulating
682 self.clear()
683
684 # dummy image for prompt
685 self.set_in_list(self.__custom_image__, "Use 'Edit image recipe' to customize recipes and packages " \
686 "to be included in your image ")
687
688 for item in event_model["pn"]:
689 name = item
690 desc = event_model["pn"][item]["description"]
691 lic = event_model["pn"][item]["license"]
692 group = event_model["pn"][item]["section"]
693 inherits = event_model["pn"][item]["inherits"]
694 summary = event_model["pn"][item]["summary"]
695 version = event_model["pn"][item]["version"]
696 revision = event_model["pn"][item]["prevision"]
697 homepage = event_model["pn"][item]["homepage"]
698 bugtracker = event_model["pn"][item]["bugtracker"]
699 filename = event_model["pn"][item]["filename"]
700 install = []
701
702 depends = event_model["depends"].get(item, []) + event_model["rdepends-pn"].get(item, [])
703
704 if ('packagegroup.bbclass' in " ".join(inherits)):
705 atype = 'packagegroup'
706 elif ('/image.bbclass' in " ".join(inherits)):
707 if "edited" not in name:
708 atype = 'image'
709 install = event_model["rdepends-pkg"].get(item, []) + event_model["rrecs-pkg"].get(item, [])
710 elif ('meta-' in name):
711 atype = 'toolchain'
712 elif (name == 'dummy-image' or name == 'dummy-toolchain'):
713 atype = 'dummy'
714 else:
715 atype = 'recipe'
716
717 self.set(self.append(), self.COL_NAME, item, self.COL_DESC, desc,
718 self.COL_LIC, lic, self.COL_GROUP, group,
719 self.COL_DEPS, " ".join(depends), self.COL_BINB, "",
720 self.COL_TYPE, atype, self.COL_INC, False,
721 self.COL_IMG, False, self.COL_INSTALL, " ".join(install), self.COL_PN, item,
722 self.COL_SUMMARY, summary, self.COL_VERSION, version, self.COL_REVISION, revision,
723 self.COL_HOMEPAGE, homepage, self.COL_BUGTRACKER, bugtracker,
724 self.COL_FILE, filename)
725
726 self.pn_path = {}
727 it = self.get_iter_first()
728 while it:
729 pn = self.get_value(it, self.COL_NAME)
730 path = self.get_path(it)
731 self.pn_path[pn] = path
732 it = self.iter_next(it)
733
734 def set_in_list(self, item, desc):
735 self.set(self.append(), self.COL_NAME, item,
736 self.COL_DESC, desc,
737 self.COL_LIC, "", self.COL_GROUP, "",
738 self.COL_DEPS, "", self.COL_BINB, "",
739 self.COL_TYPE, "image", self.COL_INC, False,
740 self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, item,
741 self.COL_SUMMARY, "", self.COL_VERSION, "", self.COL_REVISION, "",
742 self.COL_HOMEPAGE, "", self.COL_BUGTRACKER, "")
743 self.pn_path = {}
744 it = self.get_iter_first()
745 while it:
746 pn = self.get_value(it, self.COL_NAME)
747 path = self.get_path(it)
748 self.pn_path[pn] = path
749 it = self.iter_next(it)
750
751 """
752 Update the model, send out the notification.
753 """
754 def selection_change_notification(self):
755 self.emit("recipe-selection-changed")
756
757 def path_included(self, item_path):
758 return self[item_path][self.COL_INC]
759
760 """
761 Add this item, and any of its dependencies, to the image contents
762 """
763 def include_item(self, item_path, binb="", image_contents=False):
764 if self.path_included(item_path):
765 return
766
767 item_name = self[item_path][self.COL_NAME]
768 item_deps = self[item_path][self.COL_DEPS]
769
770 self[item_path][self.COL_INC] = True
771
772 item_bin = self[item_path][self.COL_BINB].split(', ')
773 if binb and not binb in item_bin:
774 item_bin.append(binb)
775 self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
776
777 # We want to do some magic with things which are brought in by the
778        # base image, so tag them as such
779 if image_contents:
780 self[item_path][self.COL_IMG] = True
781
782 if item_deps:
783            # Ensure all of the item's deps are included and, where appropriate,
784 # add this item to their COL_BINB
785 for dep in item_deps.split(" "):
786 # If the contents model doesn't already contain dep, add it
787 dep_path = self.find_path_for_item(dep)
788 if not dep_path:
789 continue
790 dep_included = self.path_included(dep_path)
791
792 if dep_included and not dep in item_bin:
793 # don't set the COL_BINB to this item if the target is an
794 # item in our own COL_BINB
795 dep_bin = self[dep_path][self.COL_BINB].split(', ')
796 if not item_name in dep_bin:
797 dep_bin.append(item_name)
798 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
799 elif not dep_included:
800 self.include_item(dep_path, binb=item_name, image_contents=image_contents)
801 dep_bin = self[item_path][self.COL_BINB].split(', ')
802 if self[item_path][self.COL_NAME] in dep_bin:
803 dep_bin.remove(self[item_path][self.COL_NAME])
804 self[item_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
805
806 def exclude_item(self, item_path):
807 if not self.path_included(item_path):
808 return
809
810 self[item_path][self.COL_INC] = False
811
812 item_name = self[item_path][self.COL_NAME]
813 item_deps = self[item_path][self.COL_DEPS]
814 if item_deps:
815 for dep in item_deps.split(" "):
816 dep_path = self.find_path_for_item(dep)
817 if not dep_path:
818 continue
819 dep_bin = self[dep_path][self.COL_BINB].split(', ')
820 if item_name in dep_bin:
821 dep_bin.remove(item_name)
822 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
823
824 item_bin = self[item_path][self.COL_BINB].split(', ')
825 if item_bin:
826 for binb in item_bin:
827 binb_path = self.find_path_for_item(binb)
828 if not binb_path:
829 continue
830 self.exclude_item(binb_path)
831
832 def reset(self):
833 it = self.get_iter_first()
834 while it:
835 self.set(it,
836 self.COL_INC, False,
837 self.COL_BINB, "",
838 self.COL_IMG, False)
839 it = self.iter_next(it)
840
841 self.selection_change_notification()
842
843 """
844    Return two lists: one of user-selected recipes and the other containing
845    all selected recipes
846 """
847 def get_selected_recipes(self):
848 allrecipes = []
849 userrecipes = []
850
851 it = self.get_iter_first()
852 while it:
853 if self.get_value(it, self.COL_INC):
854 name = self.get_value(it, self.COL_PN)
855 type = self.get_value(it, self.COL_TYPE)
856 if type != "image":
857 allrecipes.append(name)
858 sel = "User Selected" in self.get_value(it, self.COL_BINB)
859 if sel:
860 userrecipes.append(name)
861 it = self.iter_next(it)
862
863 return list(set(userrecipes)), list(set(allrecipes))
864
865 def set_selected_recipes(self, recipelist):
866 for pn in recipelist:
867 if pn in self.pn_path.keys():
868 path = self.pn_path[pn]
869 self.include_item(item_path=path,
870 binb="User Selected")
871 self.selection_change_notification()
872
873 def get_selected_image(self):
874 it = self.get_iter_first()
875 while it:
876 if self.get_value(it, self.COL_INC):
877 name = self.get_value(it, self.COL_PN)
878 type = self.get_value(it, self.COL_TYPE)
879 if type == "image":
880 sel = "User Selected" in self.get_value(it, self.COL_BINB)
881 if sel:
882 return name
883 it = self.iter_next(it)
884 return None
885
886 def set_selected_image(self, img):
887 if not img:
888 return
889 self.reset()
890 path = self.find_path_for_item(img)
891 self.include_item(item_path=path,
892 binb="User Selected",
893 image_contents=True)
894 self.selection_change_notification()
895
896 def set_custom_image_version(self, version):
897 self.custom_image_version = version
898
899 def get_custom_image_version(self):
900 return self.custom_image_version
901
902 def is_custom_image(self):
903 return self.get_selected_image() == self.__custom_image__
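A rough end-to-end sketch of how a UI page might drive this model; the event_model argument and the recipe names are assumptions for illustration, not values from this patch.

    model = RecipeListModel()
    model.populate(event_model)       # event_model from a bb.event.TargetsTreeGenerated handler
    model.set_selected_image('core-image-minimal')    # hypothetical image recipe name
    model.set_selected_recipes(['dropbear'])          # extra recipe, tagged "User Selected"
    user_recipes, all_recipes = model.get_selected_recipes()
    # user_recipes holds the explicitly chosen recipes; all_recipes additionally
    # carries everything pulled in through COL_DEPS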
diff --git a/bitbake/lib/bb/ui/crumbs/hobpages.py b/bitbake/lib/bb/ui/crumbs/hobpages.py
new file mode 100755
index 0000000000..0fd3598c3a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobpages.py
@@ -0,0 +1,128 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24from bb.ui.crumbs.hobcolor import HobColors
25from bb.ui.crumbs.hobwidget import hwc
26
27#
28# HobPage: the super class for all Hob-related pages
29#
30class HobPage (gtk.VBox):
31
32 def __init__(self, builder, title = None):
33 super(HobPage, self).__init__(False, 0)
34 self.builder = builder
35 self.builder_width, self.builder_height = self.builder.size_request()
36
37 if not title:
38 self.title = "Hob -- Image Creator"
39 else:
40 self.title = title
41 self.title_label = gtk.Label()
42
43 self.box_group_area = gtk.VBox(False, 12)
44 self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
45 self.group_align = gtk.Alignment(xalign = 0, yalign=0.5, xscale=1, yscale=1)
46 self.group_align.set_padding(15, 15, 73, 73)
47 self.group_align.add(self.box_group_area)
48 self.box_group_area.set_homogeneous(False)
49
50 def set_title(self, title):
51 self.title = title
52 self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
53
54 def add_onto_top_bar(self, widget = None, padding = 0):
55 # the top button occupies 1/7 of the page height
56        # set up an event box
57 eventbox = gtk.EventBox()
58 style = eventbox.get_style().copy()
59 style.bg[gtk.STATE_NORMAL] = eventbox.get_colormap().alloc_color(HobColors.LIGHT_GRAY, False, False)
60 eventbox.set_style(style)
61 eventbox.set_size_request(-1, 88)
62
63 hbox = gtk.HBox()
64
65 self.title_label = gtk.Label()
66 self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
67 hbox.pack_start(self.title_label, expand=False, fill=False, padding=20)
68
69 if widget:
70 # add the widget in the event box
71 hbox.pack_end(widget, expand=False, fill=False, padding=padding)
72 eventbox.add(hbox)
73
74 return eventbox
75
76 def span_tag(self, size="medium", weight="normal", forground="#1c1c1c"):
77 span_tag = "weight='%s' foreground='%s' size='%s'" % (weight, forground, size)
78 return span_tag
79
80 def append_toolbar_button(self, toolbar, buttonname, icon_disp, icon_hovor, tip, cb):
81 # Create a button and append it on the toolbar according to button name
82 icon = gtk.Image()
83 icon_display = icon_disp
84 icon_hover = icon_hovor
85 pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_display)
86 icon.set_from_pixbuf(pix_buffer)
87 tip_text = tip
88 button = toolbar.append_item(buttonname, tip, None, icon, cb)
89 return button
90
91 @staticmethod
92 def _size_to_string(size):
93 try:
94 if not size:
95 size_str = "0 B"
96 else:
97 if len(str(int(size))) > 6:
98 size_str = '%.1f' % (size*1.0/(1024*1024)) + ' MB'
99 elif len(str(int(size))) > 3:
100 size_str = '%.1f' % (size*1.0/1024) + ' KB'
101 else:
102 size_str = str(size) + ' B'
103 except:
104 size_str = "0 B"
105 return size_str
106
107 @staticmethod
108 def _string_to_size(str_size):
109 try:
110 if not str_size:
111 size = 0
112 else:
113 unit = str_size.split()
114 if len(unit) > 1:
115 if unit[1] == 'MB':
116 size = float(unit[0])*1024*1024
117 elif unit[1] == 'KB':
118 size = float(unit[0])*1024
119 elif unit[1] == 'B':
120 size = float(unit[0])
121 else:
122 size = 0
123 else:
124 size = float(unit[0])
125 except:
126 size = 0
127 return size
128
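A quick numeric check of the two converters above (input values chosen for illustration): KB and MB strings keep a single decimal, so the string round trip is deliberately lossy.

    print HobPage._size_to_string(2048)        # '2.0 KB'
    print HobPage._size_to_string(3500000)     # '3.3 MB'
    print HobPage._string_to_size('2.0 KB')    # 2048.0
    print HobPage._string_to_size('3.3 MB')    # 3460300.8 -- not the original 3500000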
diff --git a/bitbake/lib/bb/ui/crumbs/hobwidget.py b/bitbake/lib/bb/ui/crumbs/hobwidget.py
new file mode 100644
index 0000000000..2b969c146e
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobwidget.py
@@ -0,0 +1,904 @@
1# BitBake Graphical GTK User Interface
2#
3# Copyright (C) 2011-2012 Intel Corporation
4#
5# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
6# Authored by Shane Wang <shane.wang@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20import gtk
21import gobject
22import os
23import os.path
24import sys
25import pango, pangocairo
26import cairo
27import math
28
29from bb.ui.crumbs.hobcolor import HobColors
30from bb.ui.crumbs.persistenttooltip import PersistentTooltip
31
32class hwc:
33
34 MAIN_WIN_WIDTH = 1024
35 MAIN_WIN_HEIGHT = 700
36
37class hic:
38
39 HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
40
41 ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
42 ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
43 ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
44 ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
45 ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
46 ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
47 ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
48 ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
49 ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
50 ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
51 ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
52 ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
53 ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
54 ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
55 ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
56 ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
57 ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
58 ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
59 ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
60 ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
61 ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
62 ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))
63
64class HobViewTable (gtk.VBox):
65 """
66 A VBox to contain the table for different recipe views and package view
67 """
68 __gsignals__ = {
69 "toggled" : (gobject.SIGNAL_RUN_LAST,
70 gobject.TYPE_NONE,
71 (gobject.TYPE_PYOBJECT,
72 gobject.TYPE_STRING,
73 gobject.TYPE_INT,
74 gobject.TYPE_PYOBJECT,)),
75 "row-activated" : (gobject.SIGNAL_RUN_LAST,
76 gobject.TYPE_NONE,
77 (gobject.TYPE_PYOBJECT,
78 gobject.TYPE_PYOBJECT,)),
79 "cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
80 gobject.TYPE_NONE,
81 (gobject.TYPE_PYOBJECT,
82 gobject.TYPE_PYOBJECT,
83 gobject.TYPE_PYOBJECT,)),
84 }
85
86 def __init__(self, columns, name):
87 gtk.VBox.__init__(self, False, 6)
88 self.table_tree = gtk.TreeView()
89 self.table_tree.set_headers_visible(True)
90 self.table_tree.set_headers_clickable(True)
91 self.table_tree.set_rules_hint(True)
92 self.table_tree.set_enable_tree_lines(True)
93 self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
94 self.toggle_columns = []
95 self.table_tree.connect("row-activated", self.row_activated_cb)
96 self.top_bar = None
97 self.tab_name = name
98
99 for i, column in enumerate(columns):
100 col_name = column['col_name']
101 col = gtk.TreeViewColumn(col_name)
102 col.set_clickable(True)
103 col.set_resizable(True)
104 if self.tab_name.startswith('Included'):
105 if col_name!='Included':
106 col.set_sort_column_id(column['col_id'])
107 else:
108 col.set_sort_column_id(column['col_id'])
109 if 'col_min' in column.keys():
110 col.set_min_width(column['col_min'])
111 if 'col_max' in column.keys():
112 col.set_max_width(column['col_max'])
113 if 'expand' in column.keys():
114 col.set_expand(True)
115 self.table_tree.append_column(col)
116
117 if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
118 cell = gtk.CellRendererText()
119 col.pack_start(cell, True)
120 col.set_attributes(cell, text=column['col_id'])
121 if 'col_t_id' in column.keys():
122 col.add_attribute(cell, 'font', column['col_t_id'])
123 elif column['col_style'] == 'check toggle':
124 cell = HobCellRendererToggle()
125 cell.set_property('activatable', True)
126 cell.connect("toggled", self.toggled_cb, i, self.table_tree)
127 cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
128 self.toggle_id = i
129 col.pack_end(cell, True)
130 col.set_attributes(cell, active=column['col_id'])
131 self.toggle_columns.append(col_name)
132 if 'col_group' in column.keys():
133 col.set_cell_data_func(cell, self.set_group_number_cb)
134 elif column['col_style'] == 'radio toggle':
135 cell = gtk.CellRendererToggle()
136 cell.set_property('activatable', True)
137 cell.set_radio(True)
138 cell.connect("toggled", self.toggled_cb, i, self.table_tree)
139 self.toggle_id = i
140 col.pack_end(cell, True)
141 col.set_attributes(cell, active=column['col_id'])
142 self.toggle_columns.append(col_name)
143 elif column['col_style'] == 'binb':
144 cell = gtk.CellRendererText()
145 col.pack_start(cell, True)
146 col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
147 if 'col_t_id' in column.keys():
148 col.add_attribute(cell, 'font', column['col_t_id'])
149
150 self.scroll = gtk.ScrolledWindow()
151 self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
152 self.scroll.add(self.table_tree)
153
154 self.pack_end(self.scroll, True, True, 0)
155
156 def add_no_result_bar(self, entry):
157 color = HobColors.KHAKI
158 self.top_bar = gtk.EventBox()
159 self.top_bar.set_size_request(-1, 70)
160 self.top_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
161 self.top_bar.set_flags(gtk.CAN_DEFAULT)
162 self.top_bar.grab_default()
163
164 no_result_tab = gtk.Table(5, 20, True)
165 self.top_bar.add(no_result_tab)
166
167 label = gtk.Label()
168 label.set_alignment(0.0, 0.5)
169 title = "No results matching your search"
170 label.set_markup("<span size='x-large'><b>%s</b></span>" % title)
171 no_result_tab.attach(label, 1, 14, 1, 4)
172
173 clear_button = HobButton("Clear search")
174 clear_button.set_tooltip_text("Clear search query")
175 clear_button.connect('clicked', self.set_search_entry_clear_cb, entry)
176 no_result_tab.attach(clear_button, 16, 19, 1, 4)
177
178 self.pack_start(self.top_bar, False, True, 12)
179 self.top_bar.show_all()
180
181 def set_search_entry_clear_cb(self, button, search):
182 if search.get_editable() == True:
183 search.set_text("")
184 search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
185 search.grab_focus()
186
187 def display_binb_cb(self, col, cell, model, it, col_id):
188 binb = model.get_value(it, col_id)
189 # Just display the first item
190 if binb:
191 bin = binb.split(', ')
192 total_no = len(bin)
193 if total_no > 1 and bin[0] == "User Selected":
194 if total_no > 2:
195 present_binb = bin[1] + ' (+' + str(total_no - 1) + ')'
196 else:
197 present_binb = bin[1]
198 else:
199 if total_no > 1:
200 present_binb = bin[0] + ' (+' + str(total_no - 1) + ')'
201 else:
202 present_binb = bin[0]
203 cell.set_property('text', present_binb)
204 else:
205 cell.set_property('text', "")
206 return True
207
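For illustration, with hypothetical COL_BINB values the branches above produce the following cell text:

    #   "User Selected"                       -> "User Selected"
    #   "User Selected, busybox"              -> "busybox"
    #   "User Selected, busybox, base-files"  -> "busybox (+2)"
    #   "busybox, base-files"                 -> "busybox (+1)"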
208 def set_model(self, tree_model):
209 self.table_tree.set_model(tree_model)
210
211 def toggle_default(self):
212 model = self.table_tree.get_model()
213 if not model:
214 return
215 iter = model.get_iter_first()
216 if iter:
217 rowpath = model.get_path(iter)
218 model[rowpath][self.toggle_id] = True
219
220 def toggled_cb(self, cell, path, columnid, tree):
221 self.emit("toggled", cell, path, columnid, tree)
222
223 def row_activated_cb(self, tree, path, view_column):
224 if not view_column.get_title() in self.toggle_columns:
225 self.emit("row-activated", tree.get_model(), path)
226
227 def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
228 self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
229
230 def set_group_number_cb(self, col, cell, model, iter):
231 if model and (model.iter_parent(iter) == None):
232 cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
233 else:
234 cell.cell_attr["number_of_children"] = 0
235
236 def connect_group_selection(self, cb_func):
237 self.table_tree.get_selection().connect("changed", cb_func)
238
239"""
240A function to calculate a softened value for the colour of a widget when in the
241provided state.
242
243widget: the widget whose style to use
244state: the state of the widget to use the style for
245
246Returns a string value representing the softened colour
247"""
248def soften_color(widget, state=gtk.STATE_NORMAL):
249    # this colour munging routine is heavily inspired by gdu_util_get_mix_color()
250 # from gnome-disk-utility:
251 # http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
252 blend = 0.7
253 style = widget.get_style()
254 color = style.text[state]
255 color.red = color.red * blend + style.base[state].red * (1.0 - blend)
256 color.green = color.green * blend + style.base[state].green * (1.0 - blend)
257 color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
258 return color.to_string()
259
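As a worked example of the fixed 70/30 blend above, with illustrative 16-bit channel values: black text on a white base softens to a dark grey.

    # per channel: 0x0000 * 0.7 + 0xffff * 0.3 = 19660.5, i.e. the result sits
    # roughly 30% of the way from the text colour towards the base colour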
260class BaseHobButton(gtk.Button):
261 """
262 A gtk.Button subclass which follows the visual design of Hob for primary
263 action buttons
264
265 label: the text to display as the button's label
266 """
267 def __init__(self, label):
268 gtk.Button.__init__(self, label)
269 HobButton.style_button(self)
270
271 @staticmethod
272 def style_button(button):
273 style = button.get_style()
274 style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
275
276 button.set_flags(gtk.CAN_DEFAULT)
277 button.grab_default()
278
279# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
280 label = button.get_label()
281 button.set_label(label)
282 button.child.set_use_markup(True)
283
284class HobButton(BaseHobButton):
285 """
286 A gtk.Button subclass which follows the visual design of Hob for primary
287 action buttons
288
289 label: the text to display as the button's label
290 """
291 def __init__(self, label):
292 BaseHobButton.__init__(self, label)
293 HobButton.style_button(self)
294
295class HobAltButton(BaseHobButton):
296 """
297    A gtk.Button subclass which has no relief, and so is more discreet
298 """
299 def __init__(self, label):
300 BaseHobButton.__init__(self, label)
301 HobAltButton.style_button(self)
302
303 """
304 A callback for the state-changed event to ensure the text is displayed
305 differently when the widget is not sensitive
306 """
307 @staticmethod
308 def desensitise_on_state_change_cb(button, state):
309 if not button.get_property("sensitive"):
310 HobAltButton.set_text(button, False)
311 else:
312 HobAltButton.set_text(button, True)
313
314 """
315 Set the button label with an appropriate colour for the current widget state
316 """
317 @staticmethod
318 def set_text(button, sensitive=True):
319 if sensitive:
320 colour = HobColors.PALE_BLUE
321 else:
322 colour = HobColors.LIGHT_GRAY
323 button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
324 button.child.set_use_markup(True)
325
326class HobImageButton(gtk.Button):
327 """
328 A gtk.Button with an icon and two rows of text, the second of which is
329 displayed in a blended colour.
330
331 primary_text: the main button label
332 secondary_text: optional second line of text
333 icon_path: path to the icon file to display on the button
334 """
335 def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
336 gtk.Button.__init__(self)
337 self.set_relief(gtk.RELIEF_NONE)
338
339 self.icon_path = icon_path
340 self.hover_icon_path = hover_icon_path
341
342 hbox = gtk.HBox(False, 10)
343 hbox.show()
344 self.add(hbox)
345 self.icon = gtk.Image()
346 self.icon.set_from_file(self.icon_path)
347 self.icon.set_alignment(0.5, 0.0)
348 self.icon.show()
349 if self.hover_icon_path and len(self.hover_icon_path):
350 self.connect("enter-notify-event", self.set_hover_icon_cb)
351 self.connect("leave-notify-event", self.set_icon_cb)
352 hbox.pack_start(self.icon, False, False, 0)
353 label = gtk.Label()
354 label.set_alignment(0.0, 0.5)
355 colour = soften_color(label)
356 mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
357 label.set_markup(mark)
358 label.show()
359 hbox.pack_start(label, True, True, 0)
360
361 def set_hover_icon_cb(self, widget, event):
362 self.icon.set_from_file(self.hover_icon_path)
363
364 def set_icon_cb(self, widget, event):
365 self.icon.set_from_file(self.icon_path)
366
367class HobInfoButton(gtk.EventBox):
368 """
369 This class implements a button-like widget per the Hob visual and UX designs
370 which will display a persistent tooltip, with the contents of tip_markup, when
371 clicked.
372
373 tip_markup: the Pango Markup to be displayed in the persistent tooltip
374 """
375 def __init__(self, tip_markup, parent=None):
376 gtk.EventBox.__init__(self)
377 self.image = gtk.Image()
378 self.image.set_from_file(
379 hic.ICON_INFO_DISPLAY_FILE)
380 self.image.show()
381 self.add(self.image)
382 self.tip_markup = tip_markup
383 self.my_parent = parent
384
385 self.set_events(gtk.gdk.BUTTON_RELEASE |
386 gtk.gdk.ENTER_NOTIFY_MASK |
387 gtk.gdk.LEAVE_NOTIFY_MASK)
388
389 self.connect("button-release-event", self.button_release_cb)
390 self.connect("enter-notify-event", self.mouse_in_cb)
391 self.connect("leave-notify-event", self.mouse_out_cb)
392
393 """
394    When the mouse click is released, emulate a button-click and show the
395    associated information dialog
396 """
397 def button_release_cb(self, widget, event):
398 from bb.ui.crumbs.hig.propertydialog import PropertyDialog
399 self.dialog = PropertyDialog(title = '',
400 parent = self.my_parent,
401 information = self.tip_markup,
402 flags = gtk.DIALOG_DESTROY_WITH_PARENT
403 | gtk.DIALOG_NO_SEPARATOR)
404
405 button = self.dialog.add_button("Close", gtk.RESPONSE_CANCEL)
406 HobAltButton.style_button(button)
407 button.connect("clicked", lambda w: self.dialog.destroy())
408 self.dialog.show_all()
409 self.dialog.run()
410
411 """
412 Change to the prelight image when the mouse enters the widget
413 """
414 def mouse_in_cb(self, widget, event):
415 self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)
416
417 """
418    Change back to the stock image when the mouse leaves the widget
419 """
420 def mouse_out_cb(self, widget, event):
421 self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
422
423class HobIndicator(gtk.DrawingArea):
424 def __init__(self, count):
425 gtk.DrawingArea.__init__(self)
426 # Set no window for transparent background
427 self.set_has_window(False)
428 self.set_size_request(38,38)
429 # We need to pass through button clicks
430 self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
431
432 self.connect('expose-event', self.expose)
433
434 self.count = count
435 self.color = HobColors.GRAY
436
437 def expose(self, widget, event):
438 if self.count and self.count > 0:
439 ctx = widget.window.cairo_create()
440
441 x, y, w, h = self.allocation
442
443 ctx.set_operator(cairo.OPERATOR_OVER)
444 ctx.set_source_color(gtk.gdk.color_parse(self.color))
445 ctx.translate(w/2, h/2)
446 ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
447 ctx.fill_preserve()
448
449 layout = self.create_pango_layout(str(self.count))
450 textw, texth = layout.get_pixel_size()
451 x = (w/2)-(textw/2) + x
452 y = (h/2) - (texth/2) + y
453 ctx.move_to(x, y)
454 self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
455
456 def set_count(self, count):
457 self.count = count
458
459 def set_active(self, active):
460 if active:
461 self.color = HobColors.DEEP_RED
462 else:
463 self.color = HobColors.GRAY
464
465class HobTabLabel(gtk.HBox):
466 def __init__(self, text, count=0):
467 gtk.HBox.__init__(self, False, 0)
468 self.indicator = HobIndicator(count)
469 self.indicator.show()
470 self.pack_end(self.indicator, False, False)
471 self.lbl = gtk.Label(text)
472 self.lbl.set_alignment(0.0, 0.5)
473 self.lbl.show()
474 self.pack_end(self.lbl, True, True, 6)
475
476 def set_count(self, count):
477 self.indicator.set_count(count)
478
479 def set_active(self, active=True):
480 self.indicator.set_active(active)
481
482class HobNotebook(gtk.Notebook):
483 def __init__(self):
484 gtk.Notebook.__init__(self)
485 self.set_property('homogeneous', True)
486
487 self.pages = []
488
489 self.search = None
490 self.search_focus = False
491 self.page_changed = False
492
493 self.connect("switch-page", self.page_changed_cb)
494
495 self.show_all()
496
497 def page_changed_cb(self, nb, page, page_num):
498 for p, lbl in enumerate(self.pages):
499 if p == page_num:
500 lbl.set_active()
501 else:
502 lbl.set_active(False)
503
504 if self.search:
505 self.page_changed = True
506 self.reset_entry(self.search, page_num)
507
508 def append_page(self, child, tab_label, tab_tooltip=None):
509 label = HobTabLabel(tab_label)
510 if tab_tooltip:
511 label.set_tooltip_text(tab_tooltip)
512 label.set_active(False)
513 self.pages.append(label)
514 gtk.Notebook.append_page(self, child, label)
515
516 def set_entry(self, names, tips):
517 self.search = gtk.Entry()
518 self.search_names = names
519 self.search_tips = tips
520 style = self.search.get_style()
521 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
522 self.search.set_style(style)
523 self.search.set_text(names[0])
524 self.search.set_tooltip_text(self.search_tips[0])
525 self.search.props.has_tooltip = True
526
527 self.search.set_editable(False)
528 self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
529 self.search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
530 self.search.connect("icon-release", self.set_search_entry_clear_cb)
531 self.search.set_width_chars(30)
532 self.search.show()
533
534 self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
535 self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
536 self.set_action_widget(self.search, gtk.PACK_END)
537
538 def show_indicator_icon(self, title, number):
539 for child in self.pages:
540 if child.lbl.get_label() == title:
541 child.set_count(number)
542
543 def hide_indicator_icon(self, title):
544 for child in self.pages:
545 if child.lbl.get_label() == title:
546 child.set_count(0)
547
548 def set_search_entry_editable_cb(self, search, event):
549 self.search_focus = True
550 search.set_editable(True)
551 text = search.get_text()
552 if text in self.search_names:
553 search.set_text("")
554 style = self.search.get_style()
555 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
556 search.set_style(style)
557
558 def set_search_entry_reset_cb(self, search, event):
559 page_num = self.get_current_page()
560 text = search.get_text()
561 if not text:
562 self.reset_entry(search, page_num)
563
564 def reset_entry(self, entry, page_num):
565 style = entry.get_style()
566 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
567 entry.set_style(style)
568 entry.set_text(self.search_names[page_num])
569 entry.set_tooltip_text(self.search_tips[page_num])
570 entry.set_editable(False)
571 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
572
573 def set_search_entry_clear_cb(self, search, icon_pos, event):
574 if search.get_editable() == True:
575 search.set_text("")
576 search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
577 search.grab_focus()
578
579 def set_page(self, title):
580 for child in self.pages:
581 if child.lbl.get_label() == title:
582 child.grab_focus()
583 self.set_current_page(self.pages.index(child))
584 return
585
586class HobWarpCellRendererText(gtk.CellRendererText):
587 def __init__(self, col_number):
588 gtk.CellRendererText.__init__(self)
589 self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
590 self.set_property("wrap-width", 300) # default value wrap width is 300
591 self.col_n = col_number
592
593 def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
594 if widget:
595 self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
596 return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
597
598 def get_resized_wrap_width(self, treeview, column):
599 otherCols = []
600 for col in treeview.get_columns():
601 if col != column:
602 otherCols.append(col)
603 adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
604 adjwidth -= treeview.style_get_property("horizontal-separator") * 4
605 if self.props.wrap_width == adjwidth or adjwidth <= 0:
606 adjwidth = self.props.wrap_width
607 return adjwidth
608
609gobject.type_register(HobWarpCellRendererText)
610
611class HobIconChecker(hic):
612 def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
613 try:
614 pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
615 except Exception, e:
616 return None
617
618 if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
619 icon_factory = gtk.IconFactory()
620 icon_factory.add_default()
621 icon_factory.add(stock_id, gtk.IconSet(pixbuf))
622 gtk.stock_add([(stock_id, '_label', 0, 0, '')])
623
624 return icon_factory.lookup(stock_id)
625
626 return None
627
628 """
629    To keep Hob icons consistent and avoid icon differences between systems or gtk versions, some 'hob icons' are used in place of the stock 'gtk icons'.
630    This function checks the stock_id, substitutes the matching hob id for the gtk id where one exists, and returns it (otherwise the name is returned unchanged).
631 """
632 def check_stock_icon(self, stock_name=""):
633 HOB_CHECK_STOCK_NAME = {
634 ('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
635 ('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
636 ('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
637 ('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
638 ('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
639 }
640 valid_stock_id = stock_name
641 if stock_name:
642 for names, path in HOB_CHECK_STOCK_NAME.iteritems():
643 if stock_name in names:
644 valid_stock_id = names[0]
645 if not gtk.icon_factory_lookup_default(valid_stock_id):
646 self.set_hob_icon_to_stock_icon(path, valid_stock_id)
647
648 return valid_stock_id
649
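For example (a hypothetical call, assuming gtk has been initialised), any alias of the info icon resolves to the Hob replacement id and registers it on first use, while names with no replacement pass through unchanged.

    checker = HobIconChecker()
    print checker.check_stock_icon('gtk-dialog-info')   # 'hic-dialog-info', backed by indicators/info.png
    print checker.check_stock_icon('gtk-open')          # 'gtk-open' -- no Hob replacement, returned as-is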
650class HobCellRendererController(gobject.GObject):
651 (MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
652 __gsignals__ = {
653 "run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
654 gobject.TYPE_NONE,
655 ()),
656 }
657 def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
658 gobject.GObject.__init__(self)
659 self.timeout_id = None
660 self.current_angle_pos = 0.0
661 self.step_angle = 0.0
662 self.tree_headers_height = 0
663 self.running_cell_areas = []
664 self.running_mode = runningmode
665 self.is_queue_draw_row_area = is_draw_row
666 self.force_stop_enable = False
667
668 def is_active(self):
669 if self.timeout_id:
670 return True
671 else:
672 return False
673
674 def reset_run(self):
675 self.force_stop()
676 self.running_cell_areas = []
677 self.current_angle_pos = 0.0
678 self.step_angle = 0.0
679
680    ''' time_iterval: timer interval in ms (1~1000), used as the basic tick for the timer
681        init_usrdata: the user-data value the progress indicator starts from
682        min_usrdata: the minimum of the user-data range
683        max_usrdata: the maximum of the user-data range
684        step: the amount of user data to advance on each timer tick
685        Note: init_usrdata should lie within the min..max range, max should be > min,
686              and step should be < (max - min)
687    '''
688 def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
689 if (not time_iterval) or (not max_usrdata):
690 return
691 usr_range = (max_usrdata - min_usrdata) * 1.0
692 self.current_angle_pos = (init_usrdata * 1.0) / usr_range
693 self.step_angle = (step * 1) / usr_range
694 self.timeout_id = gobject.timeout_add(int(time_iterval),
695 self.make_image_on_progressing_cb, tree)
696 self.tree_headers_height = self.get_treeview_headers_height(tree)
697 self.force_stop_enable = False
698
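As an arithmetic illustration, the refresh spinner in HobCellRendererPixbuf below calls start_run(200, 0, 0, 1000, 150, tree), which gives:

    #   usr_range  = 1000 - 0     = 1000
    #   step_angle = 150 / 1000.0 = 0.15    # 15% of a revolution per 200 ms tick
    #   a full revolution therefore takes about 1 / 0.15 ~ 7 ticks, i.e. roughly 1.4 s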
699 def force_stop(self):
700 self.emit("run-timer-stopped")
701 self.force_stop_enable = True
702 if self.timeout_id:
703 if gobject.source_remove(self.timeout_id):
704 self.timeout_id = None
705
706 def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
707 if pixbuf:
708 r = max(img_width/2, img_height/2)
709 cr.translate(x + r, y + r)
710 if do_refresh:
711 cr.rotate(2 * math.pi * self.current_angle_pos)
712
713 cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
714 cr.paint()
715
716 def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
717 if do_fadeout:
718 alpha = self.current_angle_pos * 0.8
719 else:
720 alpha = (1.0 - self.current_angle_pos) * 0.8
721
722 cr.set_source_rgba(color.red, color.green, color.blue, alpha)
723 cr.rectangle(x, y, width, height)
724 cr.fill()
725
726 def get_treeview_headers_height(self, tree):
727 if tree and (tree.get_property("headers-visible") == True):
728 height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
729 return height
730
731 return 0
732
733 def make_image_on_progressing_cb(self, tree):
734 self.current_angle_pos += self.step_angle
735 if self.running_mode == self.MODE_CYCLE_RUNNING:
736 if (self.current_angle_pos >= 1):
737 self.current_angle_pos = 0
738 else:
739 if self.current_angle_pos > 1:
740 self.force_stop()
741 return False
742
743 if self.is_queue_draw_row_area:
744 for path in self.running_cell_areas:
745 rect = tree.get_cell_area(path, tree.get_column(0))
746 row_x, _, row_width, _ = tree.get_visible_rect()
747 tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
748 else:
749 for rect in self.running_cell_areas:
750 tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)
751
752 return (not self.force_stop_enable)
753
754 def append_running_cell_area(self, cell_area):
755 if cell_area and (cell_area not in self.running_cell_areas):
756 self.running_cell_areas.append(cell_area)
757
758 def remove_running_cell_area(self, cell_area):
759 if cell_area in self.running_cell_areas:
760 self.running_cell_areas.remove(cell_area)
761 if not self.running_cell_areas:
762 self.reset_run()
763
764gobject.type_register(HobCellRendererController)
765
766class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
767 def __init__(self):
768 gtk.CellRendererPixbuf.__init__(self)
769 self.control = HobCellRendererController()
770        # add an icon checker to map gtk stock icons to the corresponding hob icons
771 self.checker = HobIconChecker()
772 self.set_property("stock-size", gtk.ICON_SIZE_DND)
773
774 def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
775 if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
776 return widget.render_icon(stock_id, size)
777
778 return None
779
780 def set_icon_name_to_id(self, new_name):
781 if new_name and type(new_name) == str:
782            # check whether the name needs to be mapped to a hob icon
783 name = self.checker.check_stock_icon(new_name)
784 if name.startswith("hic") or name.startswith("gtk"):
785 stock_id = name
786 else:
787 stock_id = 'gtk-' + name
788
789 return stock_id
790
791    ''' Render the cell; the "icon-name" property takes priority.
792        Using 'hic-task-refresh' animates the pixbuf (the refresh spinner),
793        otherwise the pixbuf is drawn from the stock id or the pixbuf property.
794 '''
795 def do_render(self, window, tree, background_area,cell_area, expose_area, flags):
796 if (not self.control) or (not tree):
797 return
798
799 x, y, w, h = self.on_get_size(tree, cell_area)
800 x += cell_area.x
801 y += cell_area.y
802 w -= 2 * self.get_property("xpad")
803 h -= 2 * self.get_property("ypad")
804
805 stock_id = ""
806 if self.props.icon_name:
807 stock_id = self.set_icon_name_to_id(self.props.icon_name)
808 elif self.props.stock_id:
809 stock_id = self.props.stock_id
810 elif self.props.pixbuf:
811 pix = self.props.pixbuf
812 else:
813 return
814
815 if stock_id:
816 pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
817 if stock_id == 'hic-task-refresh':
818 self.control.append_running_cell_area(cell_area)
819 if self.control.is_active():
820 self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
821 else:
822 self.control.start_run(200, 0, 0, 1000, 150, tree)
823 else:
824 self.control.remove_running_cell_area(cell_area)
825 self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
826
827 def on_get_size(self, widget, cell_area):
828 if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
829 w, h = gtk.icon_size_lookup(self.props.stock_size)
830 calc_width = self.get_property("xpad") * 2 + w
831 calc_height = self.get_property("ypad") * 2 + h
832 x_offset = 0
833 y_offset = 0
834 if cell_area and w > 0 and h > 0:
835 x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
836 y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))
837
838 return x_offset, y_offset, w, h
839
840 return 0, 0, 0, 0
841
842gobject.type_register(HobCellRendererPixbuf)
843
844class HobCellRendererToggle(gtk.CellRendererToggle):
845 def __init__(self):
846 gtk.CellRendererToggle.__init__(self)
847 self.ctrl = HobCellRendererController(is_draw_row=True)
848 self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
849 self.cell_attr = {"fadeout": False, "number_of_children": 0}
850
851 def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
852 if (not self.ctrl) or (not widget):
853 return
854
855 if flags & gtk.CELL_RENDERER_SELECTED:
856 state = gtk.STATE_SELECTED
857 else:
858 state = gtk.STATE_NORMAL
859
860 if self.ctrl.is_active():
861 path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
862            # cell_area coordinates can be negative (e.g. while dragging the scroll bar
863            # past the tree container), in which case the path lookup can fail
864 if not path: return
865 path = path[0]
866 if path in self.ctrl.running_cell_areas:
867 cr = window.cairo_create()
868 color = widget.get_style().base[state]
869
870 row_x, _, row_width, _ = widget.get_visible_rect()
871 border_y = self.get_property("ypad")
872 self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
873 cell_area.height + border_y * 2, self.cell_attr["fadeout"])
874 # draw number of a group
875 if self.cell_attr["number_of_children"]:
876 text = "%d pkg" % self.cell_attr["number_of_children"]
877 pangolayout = widget.create_pango_layout(text)
878 textw, texth = pangolayout.get_pixel_size()
879 x = cell_area.x + (cell_area.width/2) - (textw/2)
880 y = cell_area.y + (cell_area.height/2) - (texth/2)
881
882 widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
883 else:
884 return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
885
886    '''delay: the total fade-out time, normally 1000 ms
887       cell_list: which cells need to be rendered
888 '''
889 def fadeout(self, tree, delay, cell_list=None):
890 if (delay < 200) or (not tree):
891 return
892 self.cell_attr["fadeout"] = True
893 self.ctrl.running_cell_areas = cell_list
894 self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)
895
896 def connect_render_state_changed(self, func, usrdata=None):
897 if not func:
898 return
899 if usrdata:
900 self.ctrl.connect("run-timer-stopped", func, self, usrdata)
901 else:
902 self.ctrl.connect("run-timer-stopped", func, self)
903
904gobject.type_register(HobCellRendererToggle)
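
The toggle renderer above drives its highlight animation through the shared HobCellRendererController: fadeout() registers the cell areas to animate and starts a one-shot timer, while connect_render_state_changed() lets a caller hook the controller's "run-timer-stopped" signal. A minimal usage sketch, assuming a gtk.TreeView named `tree`, a list of affected cell areas named `removed_cells`, and a signal that carries no arguments of its own (all names here are illustrative, not part of this patch):

    from bb.ui.crumbs.hobwidget import HobCellRendererToggle

    def on_fade_done(ctrl, cell, usrdata=None):
        # Called once the one-shot timer stops; a caller would typically
        # refresh or re-sort its tree model here.
        print("fadeout finished for %s" % usrdata)

    renderer = HobCellRendererToggle()
    renderer.connect_render_state_changed(on_fade_done, usrdata="Included packages")
    # Animate the affected cells for one second (delays below 200ms are ignored).
    renderer.fadeout(tree, 1000, cell_list=removed_cells)
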
diff --git a/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py b/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py
new file mode 100644
index 0000000000..2766bea8c7
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py
@@ -0,0 +1,561 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25import re
import os
26from bb.ui.crumbs.progressbar import HobProgressBar
27from bb.ui.crumbs.hobcolor import HobColors
28from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
29from bb.ui.crumbs.hoblistmodel import RecipeListModel
30from bb.ui.crumbs.hobpages import HobPage
31from bb.ui.crumbs.hig.retrieveimagedialog import RetrieveImageDialog
32
33#
34# ImageConfigurationPage
35#
36class ImageConfigurationPage (HobPage):
37
38 __dummy_machine__ = "--select a machine--"
39 __dummy_image__ = "--select an image recipe--"
40 __custom_image__ = "Select from my image recipes"
41
42 def __init__(self, builder):
43 super(ImageConfigurationPage, self).__init__(builder, "Image configuration")
44
45 self.image_combo_id = None
46        # machine_combo_changed_by_manual records whether the machine was changed by code
47        # or manually by the user. If it was changed manually, all of the user's recipe and
48        # package selections are cleared.
49 self.machine_combo_changed_by_manual = True
50 self.stopping = False
51 self.warning_shift = 0
52 self.custom_image_selected = None
53 self.create_visual_elements()
54
55 def create_visual_elements(self):
56 # create visual elements
57 self.toolbar = gtk.Toolbar()
58 self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
59 self.toolbar.set_style(gtk.TOOLBAR_BOTH)
60
61 my_images_button = self.append_toolbar_button(self.toolbar,
62 "Images",
63 hic.ICON_IMAGES_DISPLAY_FILE,
64 hic.ICON_IMAGES_HOVER_FILE,
65 "Open previously built images",
66 self.my_images_button_clicked_cb)
67 settings_button = self.append_toolbar_button(self.toolbar,
68 "Settings",
69 hic.ICON_SETTINGS_DISPLAY_FILE,
70 hic.ICON_SETTINGS_HOVER_FILE,
71 "View additional build settings",
72 self.settings_button_clicked_cb)
73
74 self.config_top_button = self.add_onto_top_bar(self.toolbar)
75
76 self.gtable = gtk.Table(40, 40, True)
77 self.create_config_machine()
78 self.create_config_baseimg()
79 self.config_build_button = self.create_config_build_button()
80
81 def _remove_all_widget(self):
82 children = self.gtable.get_children() or []
83 for child in children:
84 self.gtable.remove(child)
85 children = self.box_group_area.get_children() or []
86 for child in children:
87 self.box_group_area.remove(child)
88 children = self.get_children() or []
89 for child in children:
90 self.remove(child)
91
92 def _pack_components(self, pack_config_build_button = False):
93 self._remove_all_widget()
94 self.pack_start(self.config_top_button, expand=False, fill=False)
95 self.pack_start(self.group_align, expand=True, fill=True)
96
97 self.box_group_area.pack_start(self.gtable, expand=True, fill=True)
98 if pack_config_build_button:
99 self.box_group_area.pack_end(self.config_build_button, expand=False, fill=False)
100 else:
101 box = gtk.HBox(False, 6)
102 box.show()
103 subbox = gtk.HBox(False, 0)
104 subbox.set_size_request(205, 49)
105 subbox.show()
106 box.add(subbox)
107 self.box_group_area.pack_end(box, False, False)
108
109 def show_machine(self):
110 self.progress_bar.reset()
111 self._pack_components(pack_config_build_button = False)
112 self.set_config_machine_layout(show_progress_bar = False)
113 self.show_all()
114
115 def update_progress_bar(self, title, fraction, status=None):
116 if self.stopping == False:
117 self.progress_bar.update(fraction)
118 self.progress_bar.set_text(title)
119 self.progress_bar.set_rcstyle(status)
120
121 def show_info_populating(self):
122 self._pack_components(pack_config_build_button = False)
123 self.set_config_machine_layout(show_progress_bar = True)
124 self.show_all()
125
126 def show_info_populated(self):
127 self.progress_bar.reset()
128 self._pack_components(pack_config_build_button = False)
129 self.set_config_machine_layout(show_progress_bar = False)
130 self.set_config_baseimg_layout()
131 self.show_all()
132
133 def show_baseimg_selected(self):
134 self.progress_bar.reset()
135 self._pack_components(pack_config_build_button = True)
136 self.set_config_machine_layout(show_progress_bar = False)
137 self.set_config_baseimg_layout()
138 self.show_all()
139 if self.builder.recipe_model.get_selected_image() == self.builder.recipe_model.__custom_image__:
140 self.just_bake_button.hide()
141
142 def add_warnings_bar(self):
143        # create the warnings bar shown when recipe parsing generates warnings
144 color = HobColors.KHAKI
145 warnings_bar = gtk.EventBox()
146 warnings_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
147 warnings_bar.set_flags(gtk.CAN_DEFAULT)
148 warnings_bar.grab_default()
149
150 build_stop_tab = gtk.Table(10, 20, True)
151 warnings_bar.add(build_stop_tab)
152
153 icon = gtk.Image()
154 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ALERT_FILE)
155 icon.set_from_pixbuf(icon_pix_buffer)
156 build_stop_tab.attach(icon, 0, 2, 0, 10)
157
158 label = gtk.Label()
159 label.set_alignment(0.0, 0.5)
160 warnings_nb = len(self.builder.parsing_warnings)
161 if warnings_nb == 1:
162 label.set_markup("<span size='x-large'><b>1 recipe parsing warning</b></span>")
163 else:
164 label.set_markup("<span size='x-large'><b>%s recipe parsing warnings</b></span>" % warnings_nb)
165 build_stop_tab.attach(label, 2, 12, 0, 10)
166
167 view_warnings_button = HobButton("View warnings")
168 view_warnings_button.connect('clicked', self.view_warnings_button_clicked_cb)
169 build_stop_tab.attach(view_warnings_button, 15, 19, 1, 9)
170
171 return warnings_bar
172
173 def disable_warnings_bar(self):
174 if self.builder.parsing_warnings:
175 if hasattr(self, 'warnings_bar'):
176 self.warnings_bar.hide_all()
177 self.builder.parsing_warnings = []
178
179 def create_config_machine(self):
180 self.machine_title = gtk.Label()
181 self.machine_title.set_alignment(0.0, 0.5)
182 mark = "<span %s>Select a machine</span>" % self.span_tag('x-large', 'bold')
183 self.machine_title.set_markup(mark)
184
185 self.machine_title_desc = gtk.Label()
186 self.machine_title_desc.set_alignment(0.0, 0.5)
187 mark = ("<span %s>Your selection is the profile of the target machine for which you"
188 " are building the image.\n</span>") % (self.span_tag('medium'))
189 self.machine_title_desc.set_markup(mark)
190
191 self.machine_combo = gtk.combo_box_new_text()
192 self.machine_combo.connect("changed", self.machine_combo_changed_cb)
193
194 icon_file = hic.ICON_LAYERS_DISPLAY_FILE
195 hover_file = hic.ICON_LAYERS_HOVER_FILE
196 self.layer_button = HobImageButton("Layers", "Add support for machines, software, etc.",
197 icon_file, hover_file)
198 self.layer_button.connect("clicked", self.layer_button_clicked_cb)
199
200 markup = "Layers are a powerful mechanism to extend the Yocto Project "
201 markup += "with your own functionality.\n"
202 markup += "For more on layers, check the <a href=\""
203 markup += "http://www.yoctoproject.org/docs/current/dev-manual/"
204 markup += "dev-manual.html#understanding-and-using-layers\">reference manual</a>."
205 self.layer_info_icon = HobInfoButton("<b>Layers</b>" + "*" + markup, self.get_parent())
206 self.progress_bar = HobProgressBar()
207 self.stop_button = HobAltButton("Stop")
208 self.stop_button.connect("clicked", self.stop_button_clicked_cb)
209 self.machine_separator = gtk.HSeparator()
210
211 def set_config_machine_layout(self, show_progress_bar = False):
212 self.gtable.attach(self.machine_title, 0, 40, 0, 4)
213 self.gtable.attach(self.machine_title_desc, 0, 40, 4, 6)
214 self.gtable.attach(self.machine_combo, 0, 12, 7, 10)
215 self.gtable.attach(self.layer_button, 14, 36, 7, 12)
216 self.gtable.attach(self.layer_info_icon, 36, 40, 7, 11)
217 if show_progress_bar:
218 #self.gtable.attach(self.progress_box, 0, 40, 15, 18)
219 self.gtable.attach(self.progress_bar, 0, 37, 15, 18)
220 self.gtable.attach(self.stop_button, 37, 40, 15, 18, 0, 0)
221 if self.builder.parsing_warnings:
222 self.warnings_bar = self.add_warnings_bar()
223 self.gtable.attach(self.warnings_bar, 0, 40, 14, 18)
224 self.warning_shift = 4
225 else:
226 self.warning_shift = 0
227 self.gtable.attach(self.machine_separator, 0, 40, 13, 14)
228
229 def create_config_baseimg(self):
230 self.image_title = gtk.Label()
231 self.image_title.set_alignment(0, 1.0)
232 mark = "<span %s>Select an image recipe</span>" % self.span_tag('x-large', 'bold')
233 self.image_title.set_markup(mark)
234
235 self.image_title_desc = gtk.Label()
236 self.image_title_desc.set_alignment(0, 0.5)
237
238 mark = ("<span %s>Image recipes are a starting point for the type of image you want. "
239 "You can build them as \n"
240 "they are or edit them to suit your needs.\n</span>") % self.span_tag('medium')
241 self.image_title_desc.set_markup(mark)
242
243 self.image_combo = gtk.combo_box_new_text()
244 self.image_combo.set_row_separator_func(self.combo_separator_func, None)
245 self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
246
247 self.image_desc = gtk.Label()
248 self.image_desc.set_alignment(0.0, 0.5)
249 self.image_desc.set_size_request(256, -1)
250 self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
251 self.image_desc.set_line_wrap(True)
252
253 # button to view recipes
254 icon_file = hic.ICON_RCIPE_DISPLAY_FILE
255 hover_file = hic.ICON_RCIPE_HOVER_FILE
256 self.view_adv_configuration_button = HobImageButton("Advanced configuration",
257 "Select image types, package formats, etc",
258 icon_file, hover_file)
259 self.view_adv_configuration_button.connect("clicked", self.view_adv_configuration_button_clicked_cb)
260
261 self.image_separator = gtk.HSeparator()
262
263 def combo_separator_func(self, model, iter, user_data):
264 name = model.get_value(iter, 0)
265 if name == "--Separator--":
266 return True
267
268 def set_config_baseimg_layout(self):
269 self.gtable.attach(self.image_title, 0, 40, 15+self.warning_shift, 17+self.warning_shift)
270 self.gtable.attach(self.image_title_desc, 0, 40, 18+self.warning_shift, 22+self.warning_shift)
271 self.gtable.attach(self.image_combo, 0, 12, 23+self.warning_shift, 26+self.warning_shift)
272 self.gtable.attach(self.image_desc, 0, 12, 27+self.warning_shift, 33+self.warning_shift)
273 self.gtable.attach(self.view_adv_configuration_button, 14, 36, 23+self.warning_shift, 28+self.warning_shift)
274 self.gtable.attach(self.image_separator, 0, 40, 35+self.warning_shift, 36+self.warning_shift)
275
276 def create_config_build_button(self):
277        # Create the "Build image" and "Edit image recipe" buttons at the bottom
278 button_box = gtk.HBox(False, 6)
279
280 # create button "Build image"
281 self.just_bake_button = HobButton("Build image")
282 self.just_bake_button.set_tooltip_text("Build the image recipe as it is")
283 self.just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
284 button_box.pack_end(self.just_bake_button, expand=False, fill=False)
285
286 # create button "Edit image recipe"
287 self.edit_image_button = HobAltButton("Edit image recipe")
288 self.edit_image_button.set_tooltip_text("Customize the recipes and packages to be included in your image")
289 self.edit_image_button.connect("clicked", self.edit_image_button_clicked_cb)
290 button_box.pack_end(self.edit_image_button, expand=False, fill=False)
291
292 return button_box
293
294 def stop_button_clicked_cb(self, button):
295 self.stopping = True
296 self.progress_bar.set_text("Stopping recipe parsing")
297 self.progress_bar.set_rcstyle("stop")
298 self.builder.cancel_parse_sync()
299
300 def view_warnings_button_clicked_cb(self, button):
301 self.builder.show_warning_dialog()
302
303 def machine_combo_changed_idle_cb(self):
304 self.builder.window.set_cursor(None)
305
306 def machine_combo_changed_cb(self, machine_combo):
307 self.stopping = False
308 self.builder.parsing_warnings = []
309 combo_item = machine_combo.get_active_text()
310 if not combo_item or combo_item == self.__dummy_machine__:
311 return
312
313 self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
314 self.builder.wait(0.1) #wait for combo and cursor to update
315
316 # remove __dummy_machine__ item from the store list after first user selection
317 # because it is no longer valid
318 combo_store = machine_combo.get_model()
319 if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
320 machine_combo.remove_text(0)
321
322 self.builder.configuration.curr_mach = combo_item
323 if self.machine_combo_changed_by_manual:
324 self.builder.configuration.clear_selection()
325 # reset machine_combo_changed_by_manual
326 self.machine_combo_changed_by_manual = True
327
328 self.builder.configuration.selected_image = None
329
330 # Do reparse recipes
331 self.builder.populate_recipe_package_info_async()
332
333 glib.idle_add(self.machine_combo_changed_idle_cb)
334
335 def update_machine_combo(self):
336 self.disable_warnings_bar()
337 all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines
338
339 model = self.machine_combo.get_model()
340 model.clear()
341 for machine in all_machines:
342 self.machine_combo.append_text(machine)
343 self.machine_combo.set_active(0)
344
345 def switch_machine_combo(self):
346 self.disable_warnings_bar()
347 self.machine_combo_changed_by_manual = False
348 model = self.machine_combo.get_model()
349 active = 0
350 while active < len(model):
351 if model[active][0] == self.builder.configuration.curr_mach:
352 self.machine_combo.set_active(active)
353 return
354 active += 1
355
356 if model[0][0] != self.__dummy_machine__:
357 self.machine_combo.insert_text(0, self.__dummy_machine__)
358
359 self.machine_combo.set_active(0)
360
361 def update_image_desc(self):
362 desc = ""
363 selected_image = self.image_combo.get_active_text()
364 if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
365 image_path = self.builder.recipe_model.pn_path[selected_image]
366 image_iter = self.builder.recipe_model.get_iter(image_path)
367 desc = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)
368
369 mark = ("<span %s>%s</span>\n") % (self.span_tag('small'), desc)
370 self.image_desc.set_markup(mark)
371
372 def image_combo_changed_idle_cb(self, selected_image, selected_recipes, selected_packages):
373 self.builder.update_recipe_model(selected_image, selected_recipes)
374 self.builder.update_package_model(selected_packages)
375 self.builder.window_sensitive(True)
376
377 def image_combo_changed_cb(self, combo):
378 self.builder.window_sensitive(False)
379 selected_image = self.image_combo.get_active_text()
380 if selected_image == self.__custom_image__:
381 topdir = self.builder.get_topdir()
382 images_dir = topdir + "/recipes/images/custom/"
383 self.builder.ensure_dir(images_dir)
384
385 dialog = RetrieveImageDialog(images_dir, "Select from my image recipes",
386 self.builder, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
387 response = dialog.run()
388 if response == gtk.RESPONSE_OK:
389 image_name = dialog.get_filename()
390 head, tail = os.path.split(image_name)
391 selected_image = os.path.splitext(tail)[0]
392 self.custom_image_selected = selected_image
393 self.update_image_combo(self.builder.recipe_model, selected_image)
394 else:
395 selected_image = self.__dummy_image__
396 self.update_image_combo(self.builder.recipe_model, None)
397 dialog.destroy()
398 else:
399 if self.custom_image_selected:
400 self.custom_image_selected = None
401 self.update_image_combo(self.builder.recipe_model, selected_image)
402
403 if not selected_image or (selected_image == self.__dummy_image__):
404 self.builder.window_sensitive(True)
405 self.just_bake_button.hide()
406 self.edit_image_button.hide()
407 return
408
409 # remove __dummy_image__ item from the store list after first user selection
410 # because it is no longer valid
411 combo_store = combo.get_model()
412 if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
413 combo.remove_text(0)
414
415 self.builder.customized = False
416
417 selected_recipes = []
418
419 image_path = self.builder.recipe_model.pn_path[selected_image]
420 image_iter = self.builder.recipe_model.get_iter(image_path)
421 selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
422 self.update_image_desc()
423
424 self.builder.recipe_model.reset()
425 self.builder.package_model.reset()
426
427 self.show_baseimg_selected()
428
429 if selected_image == self.builder.recipe_model.__custom_image__:
430 self.just_bake_button.hide()
431
432 glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)
433
434 def _image_combo_connect_signal(self):
435 if not self.image_combo_id:
436 self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
437
438 def _image_combo_disconnect_signal(self):
439 if self.image_combo_id:
440 self.image_combo.disconnect(self.image_combo_id)
441 self.image_combo_id = None
442
443 def update_image_combo(self, recipe_model, selected_image):
444 # Update the image combo according to the images in the recipe_model
445 # populate image combo
446 filter = {RecipeListModel.COL_TYPE : ['image']}
447 image_model = recipe_model.tree_model(filter)
448 image_model.set_sort_column_id(recipe_model.COL_NAME, gtk.SORT_ASCENDING)
449 active = 0
450 cnt = 0
451
452 white_pattern = []
453 if self.builder.parameters.image_white_pattern:
454 for i in self.builder.parameters.image_white_pattern.split():
455 white_pattern.append(re.compile(i))
456
457 black_pattern = []
458 if self.builder.parameters.image_black_pattern:
459 for i in self.builder.parameters.image_black_pattern.split():
460 black_pattern.append(re.compile(i))
461 black_pattern.append(re.compile("hob-image"))
462 black_pattern.append(re.compile("edited(-[0-9]*)*.bb$"))
463
464 it = image_model.get_iter_first()
465 self._image_combo_disconnect_signal()
466 model = self.image_combo.get_model()
467 model.clear()
468        # Add an indicator entry to the combo store when it is first opened
469 if not selected_image:
470 self.image_combo.append_text(self.__dummy_image__)
471 cnt = cnt + 1
472
473 self.image_combo.append_text(self.__custom_image__)
474 self.image_combo.append_text("--Separator--")
475 cnt = cnt + 2
476
477 topdir = self.builder.get_topdir()
478 # append and set active
479 while it:
480 path = image_model.get_path(it)
481 it = image_model.iter_next(it)
482 image_name = image_model[path][recipe_model.COL_NAME]
483 if image_name == self.builder.recipe_model.__custom_image__:
484 continue
485
486 if black_pattern:
487 allow = True
488 for pattern in black_pattern:
489 if pattern.search(image_name):
490 allow = False
491 break
492 elif white_pattern:
493 allow = False
494 for pattern in white_pattern:
495 if pattern.search(image_name):
496 allow = True
497 break
498 else:
499 allow = True
500
501 file_name = image_model[path][recipe_model.COL_FILE]
502 if file_name and topdir in file_name:
503 allow = False
504
505 if allow:
506 self.image_combo.append_text(image_name)
507 if image_name == selected_image:
508 active = cnt
509 cnt = cnt + 1
510 self.image_combo.append_text(self.builder.recipe_model.__custom_image__)
511
512 if selected_image == self.builder.recipe_model.__custom_image__:
513 active = cnt
514
515 if self.custom_image_selected:
516 self.image_combo.append_text("--Separator--")
517 self.image_combo.append_text(self.custom_image_selected)
518 cnt = cnt + 2
519 if self.custom_image_selected == selected_image:
520 active = cnt
521
522 self.image_combo.set_active(active)
523
524 if active != 0:
525 self.show_baseimg_selected()
526
527 self._image_combo_connect_signal()
528
529 def layer_button_clicked_cb(self, button):
530 # Create a layer selection dialog
531 self.builder.show_layer_selection_dialog()
532
533 def view_adv_configuration_button_clicked_cb(self, button):
534 # Create an advanced settings dialog
535 response, settings_changed = self.builder.show_adv_settings_dialog()
536 if not response:
537 return
538 if settings_changed:
539 self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
540 self.builder.wait(0.1) #wait for adv_settings_dialog to terminate
541 self.builder.reparse_post_adv_settings()
542 self.builder.window.set_cursor(None)
543
544 def just_bake_button_clicked_cb(self, button):
545 self.builder.parsing_warnings = []
546 self.builder.just_bake()
547
548 def edit_image_button_clicked_cb(self, button):
549 self.builder.set_base_image()
550 self.builder.show_recipes()
551
552 def my_images_button_clicked_cb(self, button):
553 self.builder.show_load_my_images_dialog()
554
555 def settings_button_clicked_cb(self, button):
556 # Create an advanced settings dialog
557 response, settings_changed = self.builder.show_simple_settings_dialog()
558 if not response:
559 return
560 if settings_changed:
561 self.builder.reparse_post_adv_settings()
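
update_image_combo() above decides which image recipes appear in the combo by combining an optional whitelist and blacklist of regular expressions with a check that the recipe file does not live under the build directory. A condensed, standalone restatement of that decision; the function name and parameters are illustrative and not part of this patch:

    import re

    def image_allowed(image_name, file_name, topdir, white="", black=""):
        # Mirrors the allow/deny logic in update_image_combo().
        black_pattern = [re.compile(p) for p in black.split()]
        if black_pattern:
            # When a blacklist is configured, Hob's own generated and edited
            # recipes are always hidden as well.
            black_pattern.append(re.compile("hob-image"))
            black_pattern.append(re.compile("edited(-[0-9]*)*.bb$"))
            allow = not any(p.search(image_name) for p in black_pattern)
        elif white:
            allow = any(re.compile(p).search(image_name) for p in white.split())
        else:
            allow = True
        # Recipes whose file lives under the build directory are never offered.
        if file_name and topdir in file_name:
            allow = False
        return allow

    # e.g. image_allowed("core-image-sato", "/srv/meta/recipes-sato/images/core-image-sato.bb",
    #                    "/home/user/build") -> True when no patterns are configured
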
diff --git a/bitbake/lib/bb/ui/crumbs/imagedetailspage.py b/bitbake/lib/bb/ui/crumbs/imagedetailspage.py
new file mode 100755
index 0000000000..352e9489fd
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/imagedetailspage.py
@@ -0,0 +1,669 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gobject
24import gtk
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import hic, HobViewTable, HobAltButton, HobButton
27from bb.ui.crumbs.hobpages import HobPage
28import subprocess
import os
29from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
30from bb.ui.crumbs.hig.saveimagedialog import SaveImageDialog
31
32#
33# ImageDetailsPage
34#
35class ImageDetailsPage (HobPage):
36
37 class DetailBox (gtk.EventBox):
38 def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, button2=None, color = HobColors.LIGHT_GRAY):
39 gtk.EventBox.__init__(self)
40
41 # set color
42 style = self.get_style().copy()
43 style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
44 self.set_style(style)
45
46 self.row = gtk.Table(1, 2, False)
47 self.row.set_border_width(10)
48 self.add(self.row)
49
50 total_rows = 0
51 if widget:
52 total_rows = 10
53 if varlist and vallist:
54 # pack the icon and the text on the left
55 total_rows += len(varlist)
56 self.table = gtk.Table(total_rows, 20, True)
57 self.table.set_row_spacings(6)
58 self.table.set_size_request(100, -1)
59 self.row.attach(self.table, 0, 1, 0, 1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL)
60
61 colid = 0
62 rowid = 0
63 self.line_widgets = {}
64 if icon:
65 self.table.attach(icon, colid, colid + 2, 0, 1)
66 colid = colid + 2
67 if widget:
68 self.table.attach(widget, colid, 20, 0, 10)
69 rowid = 10
70 if varlist and vallist:
71 for row in range(rowid, total_rows):
72 index = row - rowid
73 self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
74 self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
75 # pack the button on the right
76 if button:
77 self.bbox = gtk.VBox()
78 self.bbox.pack_start(button, expand=True, fill=False)
79 if button2:
80 self.bbox.pack_start(button2, expand=True, fill=False)
81 self.bbox.set_size_request(150,-1)
82 self.row.attach(self.bbox, 1, 2, 0, 1, xoptions=gtk.FILL, yoptions=gtk.EXPAND)
83
84 def update_line_widgets(self, variable, value):
85 if len(self.line_widgets) == 0:
86 return
87 if not isinstance(self.line_widgets[variable], gtk.Label):
88 return
89 self.line_widgets[variable].set_markup(self.format_line(variable, value))
90
91 def wrap_line(self, inputs):
92 # wrap the long text of inputs
93 wrap_width_chars = 75
94 outputs = ""
95 tmps = inputs
96 less_chars = len(inputs)
97 while (less_chars - wrap_width_chars) > 0:
98 less_chars -= wrap_width_chars
99 outputs += tmps[:wrap_width_chars] + "\n "
100 tmps = inputs[less_chars:]
101 outputs += tmps
102 return outputs
103
104 def format_line(self, variable, value):
105 wraped_value = self.wrap_line(value)
106 markup = "<span weight=\'bold\'>%s</span>" % variable
107 markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
108 return markup
109
110 def text2label(self, variable, value):
111 # append the name:value to the left box
112 # such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
113 label = gtk.Label()
114 label.set_alignment(0.0, 0.5)
115 label.set_markup(self.format_line(variable, value))
116 return label
117
118 class BuildDetailBox (gtk.EventBox):
119 def __init__(self, varlist = None, vallist = None, icon = None, color = HobColors.LIGHT_GRAY):
120 gtk.EventBox.__init__(self)
121
122 # set color
123 style = self.get_style().copy()
124 style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
125 self.set_style(style)
126
127 self.hbox = gtk.HBox()
128 self.hbox.set_border_width(10)
129 self.add(self.hbox)
130
131 total_rows = 0
132 if varlist and vallist:
133 # pack the icon and the text on the left
134 total_rows += len(varlist)
135 self.table = gtk.Table(total_rows, 20, True)
136 self.table.set_row_spacings(6)
137 self.table.set_size_request(100, -1)
138 self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)
139
140 colid = 0
141 rowid = 0
142 self.line_widgets = {}
143 if icon:
144 self.table.attach(icon, colid, colid + 2, 0, 1)
145 colid = colid + 2
146 if varlist and vallist:
147 for row in range(rowid, total_rows):
148 index = row - rowid
149 self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
150 self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
151
152 def update_line_widgets(self, variable, value):
153 if len(self.line_widgets) == 0:
154 return
155 if not isinstance(self.line_widgets[variable], gtk.Label):
156 return
157 self.line_widgets[variable].set_markup(self.format_line(variable, value))
158
159 def wrap_line(self, inputs):
160 # wrap the long text of inputs
161 wrap_width_chars = 75
162 outputs = ""
163 tmps = inputs
164 less_chars = len(inputs)
165 while (less_chars - wrap_width_chars) > 0:
166 less_chars -= wrap_width_chars
167 outputs += tmps[:wrap_width_chars] + "\n "
168 tmps = inputs[less_chars:]
169 outputs += tmps
170 return outputs
171
172 def format_line(self, variable, value):
173 wraped_value = self.wrap_line(value)
174 markup = "<span weight=\'bold\'>%s</span>" % variable
175 markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
176 return markup
177
178 def text2label(self, variable, value):
179 # append the name:value to the left box
180 # such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
181 label = gtk.Label()
182 label.set_alignment(0.0, 0.5)
183 label.set_markup(self.format_line(variable, value))
184 return label
185
186 def __init__(self, builder):
187 super(ImageDetailsPage, self).__init__(builder, "Image details")
188
189 self.image_store = []
190 self.button_ids = {}
191 self.details_bottom_buttons = gtk.HBox(False, 6)
192 self.image_saved = False
193 self.create_visual_elements()
194 self.name_field_template = ""
195 self.description_field_template = ""
196
197 def create_visual_elements(self):
198 # create visual elements
199 # create the toolbar
200 self.toolbar = gtk.Toolbar()
201 self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
202 self.toolbar.set_style(gtk.TOOLBAR_BOTH)
203
204 my_images_button = self.append_toolbar_button(self.toolbar,
205 "Images",
206 hic.ICON_IMAGES_DISPLAY_FILE,
207 hic.ICON_IMAGES_HOVER_FILE,
208 "Open previously built images",
209 self.my_images_button_clicked_cb)
210 settings_button = self.append_toolbar_button(self.toolbar,
211 "Settings",
212 hic.ICON_SETTINGS_DISPLAY_FILE,
213 hic.ICON_SETTINGS_HOVER_FILE,
214 "View additional build settings",
215 self.settings_button_clicked_cb)
216
217 self.details_top_buttons = self.add_onto_top_bar(self.toolbar)
218
219 def _remove_all_widget(self):
220 children = self.get_children() or []
221 for child in children:
222 self.remove(child)
223 children = self.box_group_area.get_children() or []
224 for child in children:
225 self.box_group_area.remove(child)
226 children = self.details_bottom_buttons.get_children() or []
227 for child in children:
228 self.details_bottom_buttons.remove(child)
229
230 def show_page(self, step):
231 self.build_succeeded = (step == self.builder.IMAGE_GENERATED)
232 image_addr = self.builder.parameters.image_addr
233 image_names = self.builder.parameters.image_names
234 if self.build_succeeded:
235 machine = self.builder.configuration.curr_mach
236 base_image = self.builder.recipe_model.get_selected_image()
237 layers = self.builder.configuration.layers
238 pkg_num = "%s" % len(self.builder.package_model.get_selected_packages())
239 log_file = self.builder.current_logfile
240 else:
241 pkg_num = "N/A"
242 log_file = None
243
244 # remove
245 for button_id, button in self.button_ids.items():
246 button.disconnect(button_id)
247 self._remove_all_widget()
248
249 # repack
250 self.pack_start(self.details_top_buttons, expand=False, fill=False)
251 self.pack_start(self.group_align, expand=True, fill=True)
252
253 self.build_result = None
254 if self.image_saved or (self.build_succeeded and self.builder.current_step == self.builder.IMAGE_GENERATING):
255 # building is the previous step
256 icon = gtk.Image()
257 pixmap_path = hic.ICON_INDI_CONFIRM_FILE
258 color = HobColors.RUNNING
259 pix_buffer = gtk.gdk.pixbuf_new_from_file(pixmap_path)
260 icon.set_from_pixbuf(pix_buffer)
261 varlist = [""]
262 if self.image_saved:
263 vallist = ["Your image recipe has been saved"]
264 else:
265 vallist = ["Your image is ready"]
266 self.build_result = self.BuildDetailBox(varlist=varlist, vallist=vallist, icon=icon, color=color)
267 self.box_group_area.pack_start(self.build_result, expand=False, fill=False)
268
269 self.buttonlist = ["Build new image", "Save image recipe", "Run image", "Deploy image"]
270
271 # Name
272 self.image_store = []
273 self.toggled_image = ""
274 default_image_size = 0
275 self.num_toggled = 0
276 i = 0
277 for image_name in image_names:
278 image_size = HobPage._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)
279
280 image_attr = ("run" if (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) else \
281 ("deploy" if self.test_deployable(image_name) else ""))
282 is_toggled = (image_attr != "")
283
284 if not self.toggled_image:
285 if i == (len(image_names) - 1):
286 is_toggled = True
287 if is_toggled:
288 default_image_size = image_size
289 self.toggled_image = image_name
290
291 split_stuff = image_name.split('.')
292 if "rootfs" in split_stuff:
293 image_type = image_name[(len(split_stuff[0]) + len(".rootfs") + 1):]
294 else:
295 image_type = image_name[(len(split_stuff[0]) + 1):]
296
297 self.image_store.append({'name': image_name,
298 'type': image_type,
299 'size': image_size,
300 'is_toggled': is_toggled,
301 'action_attr': image_attr,})
302
303 i = i + 1
304 self.num_toggled += is_toggled
305
306 is_runnable = self.create_bottom_buttons(self.buttonlist, self.toggled_image)
307
308 # Generated image files info
309 varlist = ["Name: ", "Files created: ", "Directory: "]
310 vallist = []
311
312 vallist.append(image_name.split('.')[0])
313 vallist.append(', '.join(fileitem['type'] for fileitem in self.image_store))
314 vallist.append(image_addr)
315
316 view_files_button = HobAltButton("View files")
317 view_files_button.connect("clicked", self.view_files_clicked_cb, image_addr)
318 view_files_button.set_tooltip_text("Open the directory containing the image files")
319 open_log_button = None
320 if log_file:
321 open_log_button = HobAltButton("Open log")
322 open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
323 open_log_button.set_tooltip_text("Open the build's log file")
324 self.image_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=view_files_button, button2=open_log_button)
325 self.box_group_area.pack_start(self.image_detail, expand=False, fill=True)
326
327 # The default kernel box for the qemu images
328 self.sel_kernel = ""
329 self.kernel_detail = None
330 if 'qemu' in image_name:
331 self.sel_kernel = self.get_kernel_file_name()
332
333 # varlist = ["Kernel: "]
334 # vallist = []
335 # vallist.append(self.sel_kernel)
336
337 # change_kernel_button = HobAltButton("Change")
338 # change_kernel_button.connect("clicked", self.change_kernel_cb)
339 # change_kernel_button.set_tooltip_text("Change qemu kernel file")
340 # self.kernel_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=change_kernel_button)
341 # self.box_group_area.pack_start(self.kernel_detail, expand=True, fill=True)
342
343 # Machine, Image recipe and Layers
344 layer_num_limit = 15
345 varlist = ["Machine: ", "Image recipe: ", "Layers: "]
346 vallist = []
347 self.setting_detail = None
348 if self.build_succeeded:
349 vallist.append(machine)
350 if self.builder.recipe_model.is_custom_image():
351 if self.builder.configuration.initial_selected_image == self.builder.recipe_model.__custom_image__:
352                    base_image = "New image recipe"
353 else:
354 base_image = self.builder.configuration.initial_selected_image + " (edited)"
355 vallist.append(base_image)
356 i = 0
357 for layer in layers:
358 if i > layer_num_limit:
359 break
360 varlist.append(" - ")
361 i += 1
362 vallist.append("")
363 i = 0
364 for layer in layers:
365 if i > layer_num_limit:
366 break
367 elif i == layer_num_limit:
368 vallist.append("and more...")
369 else:
370 vallist.append(layer)
371 i += 1
372
373 edit_config_button = HobAltButton("Edit configuration")
374 edit_config_button.set_tooltip_text("Edit machine and image recipe")
375 edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
376 self.setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_config_button)
377 self.box_group_area.pack_start(self.setting_detail, expand=True, fill=True)
378
379 # Packages included, and Total image size
380 varlist = ["Packages included: ", "Total image size: "]
381 vallist = []
382 vallist.append(pkg_num)
383 vallist.append(default_image_size)
384 self.builder.configuration.image_size = default_image_size
385 self.builder.configuration.image_packages = self.builder.configuration.selected_packages
386 if self.build_succeeded:
387 edit_packages_button = HobAltButton("Edit packages")
388 edit_packages_button.set_tooltip_text("Edit the packages included in your image")
389 edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
390 else: # get to this page from "My images"
391 edit_packages_button = None
392 self.package_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_packages_button)
393 self.box_group_area.pack_start(self.package_detail, expand=True, fill=True)
394
395 # pack the buttons at the bottom, at this time they are already created.
396 if self.build_succeeded:
397 self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
398 else: # for "My images" page
399 self.details_separator = gtk.HSeparator()
400 self.box_group_area.pack_start(self.details_separator, expand=False, fill=False)
401 self.box_group_area.pack_start(self.details_bottom_buttons, expand=False, fill=False)
402
403 self.show_all()
404 if self.kernel_detail and (not is_runnable):
405 self.kernel_detail.hide()
406 self.image_saved = False
407
408 def view_files_clicked_cb(self, button, image_addr):
409 subprocess.call("xdg-open /%s" % image_addr, shell=True)
410
411 def open_log_clicked_cb(self, button, log_file):
412 if log_file:
413 log_file = "file:///" + log_file
414 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
415
416 def refresh_package_detail_box(self, image_size):
417 self.package_detail.update_line_widgets("Total image size: ", image_size)
418
419 def test_type_runnable(self, image_name):
420 type_runnable = False
421 for t in self.builder.parameters.runnable_image_types:
422 if image_name.endswith(t):
423 type_runnable = True
424 break
425 return type_runnable
426
427 def test_mach_runnable(self, image_name):
428 mach_runnable = False
429 for t in self.builder.parameters.runnable_machine_patterns:
430 if t in image_name:
431 mach_runnable = True
432 break
433 return mach_runnable
434
435 def test_deployable(self, image_name):
436 if self.builder.configuration.curr_mach.startswith("qemu"):
437 return False
438 deployable = False
439 for t in self.builder.parameters.deployable_image_types:
440 if image_name.endswith(t):
441 deployable = True
442 break
443 return deployable
444
445 def get_kernel_file_name(self, kernel_addr=""):
446 kernel_name = ""
447
448 if not kernel_addr:
449 kernel_addr = self.builder.parameters.image_addr
450
451        files = [f for f in os.listdir(kernel_addr) if f[0] != '.']
452 for check_file in files:
453 if check_file.endswith(".bin"):
454 name_splits = check_file.split(".")[0]
455 if self.builder.parameters.kernel_image_type in name_splits.split("-"):
456 kernel_name = check_file
457 break
458
459 return kernel_name
460
461 def show_builded_images_dialog(self, widget, primary_action=""):
462        title = primary_action if primary_action else "Your built images"
463 dialog = CrumbsDialog(title, self.builder,
464 gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
465 dialog.set_border_width(12)
466
467 label = gtk.Label()
468 label.set_use_markup(True)
469 label.set_alignment(0.0, 0.5)
470 label.set_padding(12,0)
471 if primary_action == "Run image":
472 label.set_markup("<span font_desc='12'>Select the image file you want to run:</span>")
473 elif primary_action == "Deploy image":
474 label.set_markup("<span font_desc='12'>Select the image file you want to deploy:</span>")
475 else:
476 label.set_markup("<span font_desc='12'>Select the image file you want to %s</span>" % primary_action)
477 dialog.vbox.pack_start(label, expand=False, fill=False)
478
479 # filter created images as action attribution (deploy or run)
480 action_attr = ""
481 action_images = []
482 for fileitem in self.image_store:
483 action_attr = fileitem['action_attr']
484 if (action_attr == 'run' and primary_action == "Run image") \
485 or (action_attr == 'deploy' and primary_action == "Deploy image"):
486 action_images.append(fileitem)
487
488        # pack the corresponding 'runnable' or 'deploy' radio buttons when there is more than one matching file.
489        # By design, a single build result is assumed not to contain both 'deploy' and
490        # 'runnable' files at the same time.
491 curr_row = 0
492 rows = (len(action_images)) if len(action_images) < 10 else 10
493 table = gtk.Table(rows, 10, True)
494 table.set_row_spacings(6)
495 table.set_col_spacing(0, 12)
496 table.set_col_spacing(5, 12)
497
498 sel_parent_btn = None
499 for fileitem in action_images:
500 sel_btn = gtk.RadioButton(sel_parent_btn, fileitem['type'])
501 sel_parent_btn = sel_btn if not sel_parent_btn else sel_parent_btn
502 sel_btn.set_active(fileitem['is_toggled'])
503 sel_btn.connect('toggled', self.table_selected_cb, fileitem)
504 if curr_row < 10:
505 table.attach(sel_btn, 0, 4, curr_row, curr_row + 1, xpadding=24)
506 else:
507 table.attach(sel_btn, 5, 9, curr_row - 10, curr_row - 9, xpadding=24)
508 curr_row += 1
509
510 dialog.vbox.pack_start(table, expand=False, fill=False, padding=6)
511
512 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
513 HobAltButton.style_button(button)
514
515 if primary_action:
516 button = dialog.add_button(primary_action, gtk.RESPONSE_YES)
517 HobButton.style_button(button)
518
519 dialog.show_all()
520
521 response = dialog.run()
522 dialog.destroy()
523
524 if response != gtk.RESPONSE_YES:
525 return
526
527 for fileitem in self.image_store:
528 if fileitem['is_toggled']:
529 if fileitem['action_attr'] == 'run':
530 self.builder.runqemu_image(fileitem['name'], self.sel_kernel)
531 elif fileitem['action_attr'] == 'deploy':
532 self.builder.deploy_image(fileitem['name'])
533
534 def table_selected_cb(self, tbutton, image):
535 image['is_toggled'] = tbutton.get_active()
536 if image['is_toggled']:
537 self.toggled_image = image['name']
538
539 def change_kernel_cb(self, widget):
540 kernel_path = self.builder.show_load_kernel_dialog()
541 if kernel_path and self.kernel_detail:
542 import os.path
543 self.sel_kernel = os.path.basename(kernel_path)
544 markup = self.kernel_detail.format_line("Kernel: ", self.sel_kernel)
545 label = ((self.kernel_detail.get_children()[0]).get_children()[0]).get_children()[0]
546 label.set_markup(markup)
547
548 def create_bottom_buttons(self, buttonlist, image_name):
549 # Create the buttons at the bottom
550 created = False
551 packed = False
552 self.button_ids = {}
553 is_runnable = False
554
555 # create button "Deploy image"
556 name = "Deploy image"
557 if name in buttonlist and self.test_deployable(image_name):
558 deploy_button = HobButton('Deploy image')
559 #deploy_button.set_size_request(205, 49)
560 deploy_button.set_tooltip_text("Burn a live image to a USB drive or flash memory")
561 deploy_button.set_flags(gtk.CAN_DEFAULT)
562 button_id = deploy_button.connect("clicked", self.deploy_button_clicked_cb)
563 self.button_ids[button_id] = deploy_button
564 self.details_bottom_buttons.pack_end(deploy_button, expand=False, fill=False)
565 created = True
566 packed = True
567
568 name = "Run image"
569 if name in buttonlist and self.test_type_runnable(image_name) and self.test_mach_runnable(image_name):
570 if created == True:
571 # separator
572 #label = gtk.Label(" or ")
573 #self.details_bottom_buttons.pack_end(label, expand=False, fill=False)
574
575 # create button "Run image"
576 run_button = HobAltButton("Run image")
577 else:
578 # create button "Run image" as the primary button
579 run_button = HobButton("Run image")
580 #run_button.set_size_request(205, 49)
581 run_button.set_flags(gtk.CAN_DEFAULT)
582 packed = True
583 run_button.set_tooltip_text("Start up an image with qemu emulator")
584 button_id = run_button.connect("clicked", self.run_button_clicked_cb)
585 self.button_ids[button_id] = run_button
586 self.details_bottom_buttons.pack_end(run_button, expand=False, fill=False)
587 created = True
588 is_runnable = True
589
590 name = "Save image recipe"
591 if name in buttonlist and self.builder.recipe_model.is_custom_image():
592 save_button = HobAltButton("Save image recipe")
593        save_button.set_tooltip_text("Keep your changes by saving them as an image recipe")
594 save_button.set_sensitive(not self.image_saved)
595 button_id = save_button.connect("clicked", self.save_button_clicked_cb)
596 self.button_ids[button_id] = save_button
597 self.details_bottom_buttons.pack_end(save_button, expand=False, fill=False)
598
599 name = "Build new image"
600 if name in buttonlist:
601 # create button "Build new image"
602 if packed:
603 build_new_button = HobAltButton("Build new image")
604 else:
605 build_new_button = HobButton("Build new image")
606 build_new_button.set_flags(gtk.CAN_DEFAULT)
607 #build_new_button.set_size_request(205, 49)
608 self.details_bottom_buttons.pack_end(build_new_button, expand=False, fill=False)
609 build_new_button.set_tooltip_text("Create a new image from scratch")
610 button_id = build_new_button.connect("clicked", self.build_new_button_clicked_cb)
611 self.button_ids[button_id] = build_new_button
612
613 return is_runnable
614
615 def deploy_button_clicked_cb(self, button):
616 if self.toggled_image:
617 if self.num_toggled > 1:
618 self.set_sensitive(False)
619 self.show_builded_images_dialog(None, "Deploy image")
620 self.set_sensitive(True)
621 else:
622 self.builder.deploy_image(self.toggled_image)
623
624 def run_button_clicked_cb(self, button):
625 if self.toggled_image:
626 if self.num_toggled > 1:
627 self.set_sensitive(False)
628 self.show_builded_images_dialog(None, "Run image")
629 self.set_sensitive(True)
630 else:
631 self.builder.runqemu_image(self.toggled_image, self.sel_kernel)
632
633 def save_button_clicked_cb(self, button):
634 topdir = self.builder.get_topdir()
635 images_dir = topdir + "/recipes/images/custom/"
636 self.builder.ensure_dir(images_dir)
637
638 self.name_field_template = self.builder.image_configuration_page.custom_image_selected
639 if self.name_field_template:
640 image_path = self.builder.recipe_model.pn_path[self.name_field_template]
641 image_iter = self.builder.recipe_model.get_iter(image_path)
642 self.description_field_template = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)
643 else:
644 self.name_field_template = ""
645
646 dialog = SaveImageDialog(images_dir, self.name_field_template, self.description_field_template,
647 "Save image recipe", self.builder, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
648 response = dialog.run()
649 dialog.destroy()
650
651 def build_new_button_clicked_cb(self, button):
652 self.builder.initiate_new_build_async()
653
654 def edit_config_button_clicked_cb(self, button):
655 self.builder.show_configuration()
656
657 def edit_packages_button_clicked_cb(self, button):
658 self.builder.show_packages()
659
660 def my_images_button_clicked_cb(self, button):
661 self.builder.show_load_my_images_dialog()
662
663 def settings_button_clicked_cb(self, button):
664 # Create an advanced settings dialog
665 response, settings_changed = self.builder.show_simple_settings_dialog()
666 if not response:
667 return
668 if settings_changed:
669 self.builder.reparse_post_adv_settings()
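
show_page() above tags each generated image file with an action attribute ("run", "deploy" or empty) via the test_type_runnable(), test_mach_runnable() and test_deployable() helpers, and that tag decides which primary button the page offers. A condensed restatement of the classification; the helper and its parameters (standing in for self.builder.parameters and the current machine) are illustrative, not part of this patch:

    def classify_image(image_name, curr_mach,
                       runnable_types, runnable_machines, deployable_types):
        # "run" wins when both the file type and the machine look qemu-runnable;
        # "deploy" applies to deployable file types on non-qemu machines.
        type_runnable = any(image_name.endswith(t) for t in runnable_types)
        mach_runnable = any(m in image_name for m in runnable_machines)
        deployable = (not curr_mach.startswith("qemu") and
                      any(image_name.endswith(t) for t in deployable_types))
        if type_runnable and mach_runnable:
            return "run"
        if deployable:
            return "deploy"
        return ""

    # e.g. classify_image("core-image-minimal-qemux86.ext3", "qemux86",
    #                     [".ext3"], ["qemu"], [".hddimg"]) -> "run"
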
diff --git a/bitbake/lib/bb/ui/crumbs/packageselectionpage.py b/bitbake/lib/bb/ui/crumbs/packageselectionpage.py
new file mode 100755
index 0000000000..7c62b36e6b
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/packageselectionpage.py
@@ -0,0 +1,355 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
27from bb.ui.crumbs.hoblistmodel import PackageListModel
28from bb.ui.crumbs.hobpages import HobPage
29
30#
31# PackageSelectionPage
32#
33class PackageSelectionPage (HobPage):
34
35 pages = [
36 {
37 'name' : 'Included packages',
38 'tooltip' : 'The packages currently included for your image',
39 'filter' : { PackageListModel.COL_INC : [True] },
40 'search' : 'Search packages by name',
41 'searchtip' : 'Enter a package name to find it',
42 'columns' : [{
43 'col_name' : 'Package name',
44 'col_id' : PackageListModel.COL_NAME,
45 'col_style': 'text',
46 'col_min' : 100,
47 'col_max' : 300,
48 'expand' : 'True'
49 }, {
50 'col_name' : 'Size',
51 'col_id' : PackageListModel.COL_SIZE,
52 'col_style': 'text',
53 'col_min' : 100,
54 'col_max' : 300,
55 'expand' : 'True'
56 }, {
57 'col_name' : 'Recipe',
58 'col_id' : PackageListModel.COL_RCP,
59 'col_style': 'text',
60 'col_min' : 100,
61 'col_max' : 250,
62 'expand' : 'True'
63 }, {
64 'col_name' : 'Brought in by (+others)',
65 'col_id' : PackageListModel.COL_BINB,
66 'col_style': 'binb',
67 'col_min' : 100,
68 'col_max' : 350,
69 'expand' : 'True'
70 }, {
71 'col_name' : 'Included',
72 'col_id' : PackageListModel.COL_INC,
73 'col_style': 'check toggle',
74 'col_min' : 100,
75 'col_max' : 100
76 }]
77 }, {
78 'name' : 'All packages',
79 'tooltip' : 'All packages that have been built',
80 'filter' : {},
81 'search' : 'Search packages by name',
82 'searchtip' : 'Enter a package name to find it',
83 'columns' : [{
84 'col_name' : 'Package name',
85 'col_id' : PackageListModel.COL_NAME,
86 'col_style': 'text',
87 'col_min' : 100,
88 'col_max' : 400,
89 'expand' : 'True'
90 }, {
91 'col_name' : 'Size',
92 'col_id' : PackageListModel.COL_SIZE,
93 'col_style': 'text',
94 'col_min' : 100,
95 'col_max' : 500,
96 'expand' : 'True'
97 }, {
98 'col_name' : 'Recipe',
99 'col_id' : PackageListModel.COL_RCP,
100 'col_style': 'text',
101 'col_min' : 100,
102 'col_max' : 250,
103 'expand' : 'True'
104 }, {
105 'col_name' : 'Included',
106 'col_id' : PackageListModel.COL_INC,
107 'col_style': 'check toggle',
108 'col_min' : 100,
109 'col_max' : 100
110 }]
111 }
112 ]
113
114 (INCLUDED,
115 ALL) = range(2)
116
117 def __init__(self, builder):
118 super(PackageSelectionPage, self).__init__(builder, "Edit packages")
119
120 # set invisible members
121 self.recipe_model = self.builder.recipe_model
122 self.package_model = self.builder.package_model
123
124 # create visual elements
125 self.create_visual_elements()
126
127 def included_clicked_cb(self, button):
128 self.ins.set_current_page(self.INCLUDED)
129
130 def create_visual_elements(self):
131 self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
132 self.eventbox = self.add_onto_top_bar(self.label, 73)
133 self.pack_start(self.eventbox, expand=False, fill=False)
134 self.pack_start(self.group_align, expand=True, fill=True)
135
136 # set visible members
137 self.ins = HobNotebook()
138 self.tables = [] # we need to modify table when the dialog is shown
139
140 search_names = []
141 search_tips = []
142 # append the tab
143 for page in self.pages:
144 columns = page['columns']
145 name = page['name']
146 tab = HobViewTable(columns, name)
147 search_names.append(page['search'])
148 search_tips.append(page['searchtip'])
149 filter = page['filter']
150 sort_model = self.package_model.tree_model(filter, initial=True)
151 tab.set_model(sort_model)
152 tab.connect("toggled", self.table_toggled_cb, name)
153 tab.connect("button-release-event", self.button_click_cb)
154 tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include, filter)
155 self.ins.append_page(tab, page['name'], page['tooltip'])
156 self.tables.append(tab)
157
158 self.ins.set_entry(search_names, search_tips)
159 self.ins.search.connect("changed", self.search_entry_changed)
160
161 # add all into the dialog
162 self.box_group_area.pack_start(self.ins, expand=True, fill=True)
163
164 self.button_box = gtk.HBox(False, 6)
165 self.box_group_area.pack_start(self.button_box, expand=False, fill=False)
166
167 self.build_image_button = HobButton('Build image')
168 #self.build_image_button.set_size_request(205, 49)
169 self.build_image_button.set_tooltip_text("Build target image")
170 self.build_image_button.set_flags(gtk.CAN_DEFAULT)
171 self.build_image_button.grab_default()
172 self.build_image_button.connect("clicked", self.build_image_clicked_cb)
173 self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
174
175 self.back_button = HobAltButton('Cancel')
176 self.back_button.connect("clicked", self.back_button_clicked_cb)
177 self.button_box.pack_end(self.back_button, expand=False, fill=False)
178
179 def search_entry_changed(self, entry):
180 text = entry.get_text()
181 if self.ins.search_focus:
182 self.ins.search_focus = False
183 elif self.ins.page_changed:
184 self.ins.page_change = False
185 self.filter_search(entry)
186 elif text not in self.ins.search_names:
187 self.filter_search(entry)
188
189 def filter_search(self, entry):
190 text = entry.get_text()
191 current_tab = self.ins.get_current_page()
192 filter = self.pages[current_tab]['filter']
193 filter[PackageListModel.COL_NAME] = text
194 self.tables[current_tab].set_model(self.package_model.tree_model(filter, search_data=text))
195 if self.package_model.filtered_nb == 0:
196 if not self.ins.get_nth_page(current_tab).top_bar:
197 self.ins.get_nth_page(current_tab).add_no_result_bar(entry)
198 self.ins.get_nth_page(current_tab).top_bar.set_no_show_all(True)
199 self.ins.get_nth_page(current_tab).top_bar.show()
200 self.ins.get_nth_page(current_tab).scroll.hide()
201 else:
202 if self.ins.get_nth_page(current_tab).top_bar:
203 self.ins.get_nth_page(current_tab).top_bar.hide()
204 self.ins.get_nth_page(current_tab).scroll.show()
205 if entry.get_text() == '':
206 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
207 else:
208 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, True)
209
210 def button_click_cb(self, widget, event):
211 path, col = widget.table_tree.get_cursor()
212 tree_model = widget.table_tree.get_model()
213 if path and col.get_title() != 'Included': # else activation is likely a removal
214 properties = {'binb': '' , 'name': '', 'size':'', 'recipe':'', 'files_list':''}
215 properties['binb'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_BINB)
216 properties['name'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_NAME)
217 properties['size'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_SIZE)
218 properties['recipe'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_RCP)
219 properties['files_list'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_FLIST)
220
221 self.builder.show_recipe_property_dialog(properties)
222
223 def open_log_clicked_cb(self, button, log_file):
224 if log_file:
225 log_file = "file:///" + log_file
226 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
227
228 def show_page(self, log_file):
229 children = self.button_box.get_children() or []
230 for child in children:
231 self.button_box.remove(child)
232        # re-pack the buttons as requested; add the 'Open log' button if the build succeeded
233 self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
234 if log_file:
235 open_log_button = HobAltButton("Open log")
236 open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
237 open_log_button.set_tooltip_text("Open the build's log file")
238 self.button_box.pack_end(open_log_button, expand=False, fill=False)
239 self.button_box.pack_end(self.back_button, expand=False, fill=False)
240 self.show_all()
241
242 def build_image_clicked_cb(self, button):
243 self.builder.parsing_warnings = []
244 self.builder.build_image()
245
246 def refresh_tables(self):
247 self.ins.reset_entry(self.ins.search, 0)
248 for tab in self.tables:
249 index = self.tables.index(tab)
250 filter = self.pages[index]['filter']
251 tab.set_model(self.package_model.tree_model(filter, initial=True))
252
253 def back_button_clicked_cb(self, button):
254 if self.builder.previous_step == self.builder.IMAGE_GENERATED:
255 self.builder.restore_initial_selected_packages()
256 self.refresh_selection()
257 self.builder.show_image_details()
258 else:
259 self.builder.show_configuration()
260 self.refresh_tables()
261
262 def refresh_selection(self):
263 self.builder.configuration.selected_packages = self.package_model.get_selected_packages()
264 self.builder.configuration.user_selected_packages = self.package_model.get_user_selected_packages()
265 selected_packages_num = len(self.builder.configuration.selected_packages)
266 selected_packages_size = self.package_model.get_packages_size()
267 selected_packages_size_str = HobPage._size_to_string(selected_packages_size)
268
269 if self.builder.configuration.image_packages == self.builder.configuration.selected_packages:
270 image_total_size_str = self.builder.configuration.image_size
271 else:
272 image_overhead_factor = self.builder.configuration.image_overhead_factor
273 image_rootfs_size = self.builder.configuration.image_rootfs_size / 1024 # image_rootfs_size is KB
274 image_extra_size = self.builder.configuration.image_extra_size / 1024 # image_extra_size is KB
275 base_size = image_overhead_factor * selected_packages_size
276 image_total_size = max(base_size, image_rootfs_size) + image_extra_size
277 if "zypper" in self.builder.configuration.selected_packages:
278 image_total_size += (51200 * 1024)
279 image_total_size_str = HobPage._size_to_string(image_total_size)
280
281 self.label.set_label("Packages included: %s\nSelected packages size: %s\nEstimated image size: %s" %
282 (selected_packages_num, selected_packages_size_str, image_total_size_str))
283 self.ins.show_indicator_icon("Included packages", selected_packages_num)
284
285 def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
286 if not self.package_model.path_included(path):
287 self.package_model.include_item(item_path=path, binb="User Selected")
288 else:
289 self.pre_fadeout_checkout_include(view_tree)
290 self.package_model.exclude_item(item_path=path)
291 self.render_fadeout(view_tree, cell)
292
293 self.refresh_selection()
294 if not self.builder.customized:
295 self.builder.customized = True
296 self.builder.set_base_image()
297 self.builder.configuration.selected_image = self.recipe_model.__custom_image__
298 self.builder.rcppkglist_populated()
299
300 self.builder.window_sensitive(True)
301 view_model = view_tree.get_model()
302 vpath = self.package_model.convert_path_to_vpath(view_model, path)
303 view_tree.set_cursor(vpath)
304
305 def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
306 # Click to include a package
307 self.builder.window_sensitive(False)
308 view_model = view_tree.get_model()
309 path = self.package_model.convert_vpath_to_path(view_model, view_path)
310 glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
311
312 def pre_fadeout_checkout_include(self, tree):
313        # after the fadeout the table will be sorted as before
314 self.sort_column_id = self.package_model.sort_column_id
315 self.sort_order = self.package_model.sort_order
316
317 self.package_model.resync_fadeout_column(self.package_model.get_iter_first())
318        # Check out a model based on the COL_FADE_INC column, which
319        # saves the previous state of COL_INC before exclude_item runs
320 filter = { PackageListModel.COL_FADE_INC : [True]}
321 new_model = self.package_model.tree_model(filter, excluded_items_ahead=True)
322 tree.set_model(new_model)
323 tree.expand_all()
324
325 def get_excluded_rows(self, to_render_cells, model, it):
326 while it:
327 path = model.get_path(it)
328 prev_cell_is_active = model.get_value(it, PackageListModel.COL_FADE_INC)
329 curr_cell_is_active = model.get_value(it, PackageListModel.COL_INC)
330 if (prev_cell_is_active == True) and (curr_cell_is_active == False):
331 to_render_cells.append(path)
332 if model.iter_has_child(it):
333 self.get_excluded_rows(to_render_cells, model, model.iter_children(it))
334 it = model.iter_next(it)
335
336 return to_render_cells
337
338 def render_fadeout(self, tree, cell):
339 if (not cell) or (not tree):
340 return
341 to_render_cells = []
342 view_model = tree.get_model()
343 self.get_excluded_rows(to_render_cells, view_model, view_model.get_iter_first())
344
345 cell.fadeout(tree, 1000, to_render_cells)
346
347 def after_fadeout_checkin_include(self, table, ctrl, cell, tree, filter):
348 self.package_model.sort_column_id = self.sort_column_id
349 self.package_model.sort_order = self.sort_order
350 tree.set_model(self.package_model.tree_model(filter))
351 tree.expand_all()
352
353 def set_packages_curr_tab(self, curr_page):
354 self.ins.set_current_page(curr_page)
355
diff --git a/bitbake/lib/bb/ui/crumbs/persistenttooltip.py b/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
new file mode 100644
index 0000000000..927c194292
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
@@ -0,0 +1,186 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gobject
22import gtk
23try:
24 import gconf
25except:
26 pass
27
28class PersistentTooltip(gtk.Window):
29 """
30 A tooltip which persists once shown until the user dismisses it with the Esc
31 key or by clicking the close button.
32
33 # FIXME: the PersistentTooltip should be disabled when the user clicks anywhere off
34    # it. We can't do this with focus-out-event because modal ensures we have focus?
35
36 markup: some Pango text markup to display in the tooltip
37 """
38 def __init__(self, markup, parent_win=None):
39 gtk.Window.__init__(self, gtk.WINDOW_POPUP)
40
41 # Inherit the system theme for a tooltip
42 style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
43 'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
44 self.set_style(style)
45
46 # The placement of the close button on the tip should reflect how the
47        # window manager of the user's system places close buttons. Try to read
48 # the metacity gconf key to determine whether the close button is on the
49 # left or the right.
50        # If we can't determine the user's configuration we default
51 # to close buttons being on the right.
52 __button_right = True
53 try:
54 client = gconf.client_get_default()
55 order = client.get_string("/apps/metacity/general/button_layout")
56 if order and order.endswith(":"):
57 __button_right = False
58 except NameError:
59 pass
60
61 # We need to ensure we're only shown once
62 self.shown = False
63
64 # We don't want any WM decorations
65 self.set_decorated(False)
66 # We don't want to show in the taskbar or window switcher
67 self.set_skip_pager_hint(True)
68 self.set_skip_taskbar_hint(True)
69 # We must be modal to ensure we grab focus when presented from a gtk.Dialog
70 self.set_modal(True)
71
72 self.set_border_width(0)
73 self.set_position(gtk.WIN_POS_MOUSE)
74 self.set_opacity(0.95)
75
76 # Ensure a reasonable minimum size
77 self.set_geometry_hints(self, 100, 50)
78
79        # Set this window as a transient window for the parent (main window)
80 if parent_win:
81 self.set_transient_for(parent_win)
82 self.set_destroy_with_parent(True)
83 # Draw our label and close buttons
84 hbox = gtk.HBox(False, 0)
85 hbox.show()
86 self.add(hbox)
87
88 img = gtk.Image()
89 img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
90
91 self.button = gtk.Button()
92 self.button.set_image(img)
93 self.button.connect("clicked", self._dismiss_cb)
94 self.button.set_flags(gtk.CAN_DEFAULT)
95 self.button.grab_focus()
96 self.button.show()
97 vbox = gtk.VBox(False, 0)
98 vbox.show()
99 vbox.pack_start(self.button, False, False, 0)
100 if __button_right:
101 hbox.pack_end(vbox, True, True, 0)
102 else:
103 hbox.pack_start(vbox, True, True, 0)
104
105 self.set_default(self.button)
106
107 bin = gtk.HBox(True, 6)
108 bin.set_border_width(6)
109 bin.show()
110 self.label = gtk.Label()
111 self.label.set_line_wrap(True)
112 # We want to match the colours of the normal tooltips, as dictated by
113        # the user's gtk+-2.0 theme, wherever possible - on some systems this
114 # requires explicitly setting a fg_color for the label which matches the
115 # tooltip_fg_color
116 settings = gtk.settings_get_default()
117 colours = settings.get_property('gtk-color-scheme').split('\n')
118 # remove any empty lines, there's likely to be a trailing one after
119 # calling split on a dictionary-like string
120 colours = filter(None, colours)
121 for col in colours:
122 item, val = col.split(': ')
123 if item == 'tooltip_fg_color':
124 style = self.label.get_style()
125 style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
126 self.label.set_style(style)
127 break # we only care for the tooltip_fg_color
128
129 self.label.set_markup(markup)
130 self.label.show()
131 bin.add(self.label)
132 hbox.pack_end(bin, True, True, 6)
133
134 # add the original URL display for user reference
135 if 'a href' in markup:
136 hbox.set_tooltip_text(self.get_markup_url(markup))
137 hbox.show()
138
139 self.connect("key-press-event", self._catch_esc_cb)
140
141 """
142 Callback when the PersistentTooltip's close button is clicked.
143 Hides the PersistentTooltip.
144 """
145 def _dismiss_cb(self, button):
146 self.hide()
147 return True
148
149 """
150 Callback when the Esc key is detected. Hides the PersistentTooltip.
151 """
152 def _catch_esc_cb(self, widget, event):
153 keyname = gtk.gdk.keyval_name(event.keyval)
154 if keyname == "Escape":
155 self.hide()
156 return True
157
158 """
159 Called to present the PersistentTooltip.
160    Overrides the superclass's show() method to include state tracking.
161 """
162 def show(self):
163 if not self.shown:
164 self.shown = True
165 gtk.Window.show(self)
166
167 """
168 Called to hide the PersistentTooltip.
169    Overrides the superclass's hide() method to include state tracking.
170 """
171 def hide(self):
172 self.shown = False
173 gtk.Window.hide(self)
174
175 """
176 Called to get the hyperlink URL from markup text.
177 """
178 def get_markup_url(self, markup):
179 url = "http:"
180 if markup and type(markup) == str:
181 s = markup
182 if 'http:' in s:
183 import re
184 url = re.search('(http:[^,\\ "]+)', s).group(0)
185
186 return url
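
get_markup_url() above extracts the first http: URL from the Pango markup with a single regular expression. A minimal standalone check of that behaviour (same pattern as used above; the sample markup string is made up):

import re

markup = 'Details: <a href="http://example.com/info,page">info</a>'
match = re.search('(http:[^,\\ "]+)', markup)
url = match.group(0) if match else "http:"
print(url)  # the match stops at the first comma, space or double quote
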
diff --git a/bitbake/lib/bb/ui/crumbs/progress.py b/bitbake/lib/bb/ui/crumbs/progress.py
new file mode 100644
index 0000000000..1d28a111b3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/progress.py
@@ -0,0 +1,23 @@
1import gtk
2
3class ProgressBar(gtk.Dialog):
4 def __init__(self, parent):
5
6 gtk.Dialog.__init__(self, flags=(gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT))
7 self.set_title("Parsing metadata, please wait...")
8 self.set_default_size(500, 0)
9 self.set_transient_for(parent)
10 self.progress = gtk.ProgressBar()
11 self.vbox.pack_start(self.progress)
12 self.show_all()
13
14 def set_text(self, msg):
15 self.progress.set_text(msg)
16
17 def update(self, x, y):
18 self.progress.set_fraction(float(x)/float(y))
19 self.progress.set_text("%2d %%" % (x*100/y))
20
21 def pulse(self):
22 self.progress.set_text("Loading...")
23 self.progress.pulse()
diff --git a/bitbake/lib/bb/ui/crumbs/progressbar.py b/bitbake/lib/bb/ui/crumbs/progressbar.py
new file mode 100644
index 0000000000..3e2c660e4a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/progressbar.py
@@ -0,0 +1,59 @@
1# BitBake Graphical GTK User Interface
2#
3# Copyright (C) 2011 Intel Corporation
4#
5# Authored by Shane Wang <shane.wang@intel.com>
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import gtk
21from bb.ui.crumbs.hobcolor import HobColors
22
23class HobProgressBar (gtk.ProgressBar):
24 def __init__(self):
25 gtk.ProgressBar.__init__(self)
26 self.set_rcstyle(True)
27 self.percentage = 0
28
29 def set_rcstyle(self, status):
30 rcstyle = gtk.RcStyle()
31 rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
32 if status == "stop":
33 rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
34 elif status == "fail":
35 rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
36 else:
37 rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
38 self.modify_style(rcstyle)
39
40 def set_title(self, text=None):
41 if not text:
42 text = ""
43 text += " %.0f%%" % self.percentage
44 self.set_text(text)
45
46 def set_stop_title(self, text=None):
47 if not text:
48 text = ""
49 self.set_text(text)
50
51 def reset(self):
52 self.set_fraction(0)
53 self.set_text("")
54 self.set_rcstyle(True)
55 self.percentage = 0
56
57 def update(self, fraction):
58 self.percentage = int(fraction * 100)
59 self.set_fraction(fraction)
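
HobProgressBar above layers a percentage label and a colour scheme on top of gtk.ProgressBar. A minimal usage sketch (assumes a PyGTK environment like the rest of this patch; the window and values are illustrative only):

import gtk
from bb.ui.crumbs.progressbar import HobProgressBar

win = gtk.Window()
bar = HobProgressBar()
win.add(bar)
win.show_all()

bar.update(0.42)           # records percentage = 42 and sets the fraction
bar.set_title("Building")  # label becomes "Building 42%"
bar.set_rcstyle("fail")    # switch to the error colour scheme on failure
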
diff --git a/bitbake/lib/bb/ui/crumbs/puccho.glade b/bitbake/lib/bb/ui/crumbs/puccho.glade
new file mode 100644
index 0000000000..d7553a6e14
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/puccho.glade
@@ -0,0 +1,606 @@
1<?xml version="1.0" encoding="UTF-8" standalone="no"?>
2<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
3<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
4<glade-interface>
5 <widget class="GtkDialog" id="build_dialog">
6 <property name="title" translatable="yes">Start a build</property>
7 <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
8 <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
9 <property name="has_separator">False</property>
10 <child internal-child="vbox">
11 <widget class="GtkVBox" id="dialog-vbox1">
12 <property name="visible">True</property>
13 <property name="spacing">2</property>
14 <child>
15 <widget class="GtkTable" id="build_table">
16 <property name="visible">True</property>
17 <property name="border_width">6</property>
18 <property name="n_rows">7</property>
19 <property name="n_columns">3</property>
20 <property name="column_spacing">5</property>
21 <property name="row_spacing">6</property>
22 <child>
23 <widget class="GtkAlignment" id="status_alignment">
24 <property name="visible">True</property>
25 <property name="left_padding">12</property>
26 <child>
27 <widget class="GtkHBox" id="status_hbox">
28 <property name="spacing">6</property>
29 <child>
30 <widget class="GtkImage" id="status_image">
31 <property name="visible">True</property>
32 <property name="no_show_all">True</property>
33 <property name="xalign">0</property>
34 <property name="stock">gtk-dialog-error</property>
35 </widget>
36 <packing>
37 <property name="expand">False</property>
38 <property name="fill">False</property>
39 </packing>
40 </child>
41 <child>
42 <widget class="GtkLabel" id="status_label">
43 <property name="visible">True</property>
44 <property name="xalign">0</property>
45 <property name="label" translatable="yes">If you see this text something is wrong...</property>
46 <property name="use_markup">True</property>
47 <property name="use_underline">True</property>
48 </widget>
49 <packing>
50 <property name="position">1</property>
51 </packing>
52 </child>
53 </widget>
54 </child>
55 </widget>
56 <packing>
57 <property name="right_attach">3</property>
58 <property name="top_attach">2</property>
59 <property name="bottom_attach">3</property>
60 </packing>
61 </child>
62 <child>
63 <widget class="GtkLabel" id="label2">
64 <property name="visible">True</property>
65 <property name="xalign">0</property>
66 <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
67 <property name="use_markup">True</property>
68 </widget>
69 <packing>
70 <property name="right_attach">3</property>
71 <property name="top_attach">3</property>
72 <property name="bottom_attach">4</property>
73 <property name="y_options"></property>
74 </packing>
75 </child>
76 <child>
77 <widget class="GtkComboBox" id="image_combo">
78 <property name="visible">True</property>
79 <property name="sensitive">False</property>
80 </widget>
81 <packing>
82 <property name="left_attach">1</property>
83 <property name="right_attach">2</property>
84 <property name="top_attach">6</property>
85 <property name="bottom_attach">7</property>
86 <property name="y_options"></property>
87 </packing>
88 </child>
89 <child>
90 <widget class="GtkLabel" id="image_label">
91 <property name="visible">True</property>
92 <property name="sensitive">False</property>
93 <property name="xalign">0</property>
94 <property name="xpad">12</property>
95 <property name="label" translatable="yes">Image:</property>
96 </widget>
97 <packing>
98 <property name="top_attach">6</property>
99 <property name="bottom_attach">7</property>
100 <property name="y_options"></property>
101 </packing>
102 </child>
103 <child>
104 <widget class="GtkComboBox" id="distribution_combo">
105 <property name="visible">True</property>
106 <property name="sensitive">False</property>
107 </widget>
108 <packing>
109 <property name="left_attach">1</property>
110 <property name="right_attach">2</property>
111 <property name="top_attach">5</property>
112 <property name="bottom_attach">6</property>
113 <property name="y_options"></property>
114 </packing>
115 </child>
116 <child>
117 <widget class="GtkLabel" id="distribution_label">
118 <property name="visible">True</property>
119 <property name="sensitive">False</property>
120 <property name="xalign">0</property>
121 <property name="xpad">12</property>
122 <property name="label" translatable="yes">Distribution:</property>
123 </widget>
124 <packing>
125 <property name="top_attach">5</property>
126 <property name="bottom_attach">6</property>
127 <property name="y_options"></property>
128 </packing>
129 </child>
130 <child>
131 <widget class="GtkComboBox" id="machine_combo">
132 <property name="visible">True</property>
133 <property name="sensitive">False</property>
134 </widget>
135 <packing>
136 <property name="left_attach">1</property>
137 <property name="right_attach">2</property>
138 <property name="top_attach">4</property>
139 <property name="bottom_attach">5</property>
140 <property name="y_options"></property>
141 </packing>
142 </child>
143 <child>
144 <widget class="GtkLabel" id="machine_label">
145 <property name="visible">True</property>
146 <property name="sensitive">False</property>
147 <property name="xalign">0</property>
148 <property name="xpad">12</property>
149 <property name="label" translatable="yes">Machine:</property>
150 </widget>
151 <packing>
152 <property name="top_attach">4</property>
153 <property name="bottom_attach">5</property>
154 <property name="y_options"></property>
155 </packing>
156 </child>
157 <child>
158 <widget class="GtkButton" id="refresh_button">
159 <property name="visible">True</property>
160 <property name="sensitive">False</property>
161 <property name="can_focus">True</property>
162 <property name="receives_default">True</property>
163 <property name="label" translatable="yes">gtk-refresh</property>
164 <property name="use_stock">True</property>
165 <property name="response_id">0</property>
166 </widget>
167 <packing>
168 <property name="left_attach">2</property>
169 <property name="right_attach">3</property>
170 <property name="top_attach">1</property>
171 <property name="bottom_attach">2</property>
172 <property name="y_options"></property>
173 </packing>
174 </child>
175 <child>
176 <widget class="GtkEntry" id="location_entry">
177 <property name="visible">True</property>
178 <property name="can_focus">True</property>
179 <property name="width_chars">32</property>
180 </widget>
181 <packing>
182 <property name="left_attach">1</property>
183 <property name="right_attach">2</property>
184 <property name="top_attach">1</property>
185 <property name="bottom_attach">2</property>
186 <property name="y_options"></property>
187 </packing>
188 </child>
189 <child>
190 <widget class="GtkLabel" id="label3">
191 <property name="visible">True</property>
192 <property name="xalign">0</property>
193 <property name="xpad">12</property>
194 <property name="label" translatable="yes">Location:</property>
195 </widget>
196 <packing>
197 <property name="top_attach">1</property>
198 <property name="bottom_attach">2</property>
199 <property name="y_options"></property>
200 </packing>
201 </child>
202 <child>
203 <widget class="GtkLabel" id="label1">
204 <property name="visible">True</property>
205 <property name="xalign">0</property>
206 <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
207 <property name="use_markup">True</property>
208 </widget>
209 <packing>
210 <property name="right_attach">3</property>
211 <property name="y_options"></property>
212 </packing>
213 </child>
214 <child>
215 <widget class="GtkAlignment" id="alignment1">
216 <property name="visible">True</property>
217 <child>
218 <placeholder/>
219 </child>
220 </widget>
221 <packing>
222 <property name="left_attach">2</property>
223 <property name="right_attach">3</property>
224 <property name="top_attach">4</property>
225 <property name="bottom_attach">5</property>
226 <property name="y_options"></property>
227 </packing>
228 </child>
229 <child>
230 <widget class="GtkAlignment" id="alignment2">
231 <property name="visible">True</property>
232 <child>
233 <placeholder/>
234 </child>
235 </widget>
236 <packing>
237 <property name="left_attach">2</property>
238 <property name="right_attach">3</property>
239 <property name="top_attach">5</property>
240 <property name="bottom_attach">6</property>
241 <property name="y_options"></property>
242 </packing>
243 </child>
244 <child>
245 <widget class="GtkAlignment" id="alignment3">
246 <property name="visible">True</property>
247 <child>
248 <placeholder/>
249 </child>
250 </widget>
251 <packing>
252 <property name="left_attach">2</property>
253 <property name="right_attach">3</property>
254 <property name="top_attach">6</property>
255 <property name="bottom_attach">7</property>
256 <property name="y_options"></property>
257 </packing>
258 </child>
259 </widget>
260 <packing>
261 <property name="position">1</property>
262 </packing>
263 </child>
264 <child internal-child="action_area">
265 <widget class="GtkHButtonBox" id="dialog-action_area1">
266 <property name="visible">True</property>
267 <property name="layout_style">GTK_BUTTONBOX_END</property>
268 <child>
269 <placeholder/>
270 </child>
271 <child>
272 <placeholder/>
273 </child>
274 <child>
275 <placeholder/>
276 </child>
277 </widget>
278 <packing>
279 <property name="expand">False</property>
280 <property name="pack_type">GTK_PACK_END</property>
281 </packing>
282 </child>
283 </widget>
284 </child>
285 </widget>
286 <widget class="GtkDialog" id="dialog2">
287 <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
288 <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
289 <property name="has_separator">False</property>
290 <child internal-child="vbox">
291 <widget class="GtkVBox" id="dialog-vbox2">
292 <property name="visible">True</property>
293 <property name="spacing">2</property>
294 <child>
295 <widget class="GtkTable" id="table2">
296 <property name="visible">True</property>
297 <property name="border_width">6</property>
298 <property name="n_rows">7</property>
299 <property name="n_columns">3</property>
300 <property name="column_spacing">6</property>
301 <property name="row_spacing">6</property>
302 <child>
303 <widget class="GtkLabel" id="label7">
304 <property name="visible">True</property>
305 <property name="xalign">0</property>
306 <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
307 <property name="use_markup">True</property>
308 </widget>
309 <packing>
310 <property name="right_attach">3</property>
311 <property name="y_options"></property>
312 </packing>
313 </child>
314 <child>
315 <widget class="GtkAlignment" id="alignment4">
316 <property name="visible">True</property>
317 <property name="xalign">0</property>
318 <property name="left_padding">12</property>
319 <child>
320 <widget class="GtkScrolledWindow" id="scrolledwindow1">
321 <property name="visible">True</property>
322 <property name="can_focus">True</property>
323 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
324 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
325 <child>
326 <widget class="GtkTreeView" id="treeview1">
327 <property name="visible">True</property>
328 <property name="can_focus">True</property>
329 <property name="headers_clickable">True</property>
330 </widget>
331 </child>
332 </widget>
333 </child>
334 </widget>
335 <packing>
336 <property name="right_attach">3</property>
337 <property name="top_attach">2</property>
338 <property name="bottom_attach">3</property>
339 <property name="y_options"></property>
340 </packing>
341 </child>
342 <child>
343 <widget class="GtkEntry" id="entry1">
344 <property name="visible">True</property>
345 <property name="can_focus">True</property>
346 </widget>
347 <packing>
348 <property name="left_attach">1</property>
349 <property name="right_attach">3</property>
350 <property name="top_attach">1</property>
351 <property name="bottom_attach">2</property>
352 <property name="y_options"></property>
353 </packing>
354 </child>
355 <child>
356 <widget class="GtkLabel" id="label9">
357 <property name="visible">True</property>
358 <property name="xalign">0</property>
359 <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
360 <property name="use_markup">True</property>
361 </widget>
362 <packing>
363 <property name="right_attach">3</property>
364 <property name="top_attach">4</property>
365 <property name="bottom_attach">5</property>
366 <property name="y_options"></property>
367 </packing>
368 </child>
369 <child>
370 <widget class="GtkAlignment" id="alignment6">
371 <property name="visible">True</property>
372 <property name="xalign">0</property>
373 <property name="xscale">0</property>
374 <child>
375 <widget class="GtkLabel" id="label8">
376 <property name="visible">True</property>
377 <property name="xalign">0</property>
378 <property name="yalign">0</property>
379 <property name="xpad">12</property>
380 <property name="label" translatable="yes">Location: </property>
381 </widget>
382 </child>
383 </widget>
384 <packing>
385 <property name="top_attach">1</property>
386 <property name="bottom_attach">2</property>
387 <property name="y_options"></property>
388 </packing>
389 </child>
390 <child>
391 <widget class="GtkAlignment" id="alignment7">
392 <property name="visible">True</property>
393 <property name="xalign">1</property>
394 <property name="xscale">0</property>
395 <child>
396 <widget class="GtkHButtonBox" id="hbuttonbox1">
397 <property name="visible">True</property>
398 <property name="spacing">5</property>
399 <child>
400 <widget class="GtkButton" id="button7">
401 <property name="visible">True</property>
402 <property name="can_focus">True</property>
403 <property name="receives_default">True</property>
404 <property name="label" translatable="yes">gtk-remove</property>
405 <property name="use_stock">True</property>
406 <property name="response_id">0</property>
407 </widget>
408 </child>
409 <child>
410 <widget class="GtkButton" id="button6">
411 <property name="visible">True</property>
412 <property name="can_focus">True</property>
413 <property name="receives_default">True</property>
414 <property name="label" translatable="yes">gtk-edit</property>
415 <property name="use_stock">True</property>
416 <property name="response_id">0</property>
417 </widget>
418 <packing>
419 <property name="position">1</property>
420 </packing>
421 </child>
422 <child>
423 <widget class="GtkButton" id="button5">
424 <property name="visible">True</property>
425 <property name="can_focus">True</property>
426 <property name="receives_default">True</property>
427 <property name="label" translatable="yes">gtk-add</property>
428 <property name="use_stock">True</property>
429 <property name="response_id">0</property>
430 </widget>
431 <packing>
432 <property name="position">2</property>
433 </packing>
434 </child>
435 </widget>
436 </child>
437 </widget>
438 <packing>
439 <property name="left_attach">1</property>
440 <property name="right_attach">3</property>
441 <property name="top_attach">3</property>
442 <property name="bottom_attach">4</property>
443 <property name="y_options"></property>
444 </packing>
445 </child>
446 <child>
447 <widget class="GtkAlignment" id="alignment5">
448 <property name="visible">True</property>
449 <child>
450 <placeholder/>
451 </child>
452 </widget>
453 <packing>
454 <property name="top_attach">3</property>
455 <property name="bottom_attach">4</property>
456 <property name="y_options"></property>
457 </packing>
458 </child>
459 <child>
460 <widget class="GtkLabel" id="label10">
461 <property name="visible">True</property>
462 <property name="xalign">0</property>
463 <property name="yalign">0</property>
464 <property name="xpad">12</property>
465 <property name="label" translatable="yes">Search:</property>
466 </widget>
467 <packing>
468 <property name="top_attach">5</property>
469 <property name="bottom_attach">6</property>
470 <property name="y_options"></property>
471 </packing>
472 </child>
473 <child>
474 <widget class="GtkEntry" id="entry2">
475 <property name="visible">True</property>
476 <property name="can_focus">True</property>
477 </widget>
478 <packing>
479 <property name="left_attach">1</property>
480 <property name="right_attach">3</property>
481 <property name="top_attach">5</property>
482 <property name="bottom_attach">6</property>
483 <property name="y_options"></property>
484 </packing>
485 </child>
486 <child>
487 <widget class="GtkAlignment" id="alignment8">
488 <property name="visible">True</property>
489 <property name="xalign">0</property>
490 <property name="left_padding">12</property>
491 <child>
492 <widget class="GtkScrolledWindow" id="scrolledwindow2">
493 <property name="visible">True</property>
494 <property name="can_focus">True</property>
495 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
496 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
497 <child>
498 <widget class="GtkTreeView" id="treeview2">
499 <property name="visible">True</property>
500 <property name="can_focus">True</property>
501 <property name="headers_clickable">True</property>
502 </widget>
503 </child>
504 </widget>
505 </child>
506 </widget>
507 <packing>
508 <property name="right_attach">3</property>
509 <property name="top_attach">6</property>
510 <property name="bottom_attach">7</property>
511 <property name="y_options"></property>
512 </packing>
513 </child>
514 </widget>
515 <packing>
516 <property name="position">1</property>
517 </packing>
518 </child>
519 <child internal-child="action_area">
520 <widget class="GtkHButtonBox" id="dialog-action_area2">
521 <property name="visible">True</property>
522 <property name="layout_style">GTK_BUTTONBOX_END</property>
523 <child>
524 <widget class="GtkButton" id="button4">
525 <property name="visible">True</property>
526 <property name="can_focus">True</property>
527 <property name="receives_default">True</property>
528 <property name="label" translatable="yes">gtk-close</property>
529 <property name="use_stock">True</property>
530 <property name="response_id">0</property>
531 </widget>
532 </child>
533 </widget>
534 <packing>
535 <property name="expand">False</property>
536 <property name="pack_type">GTK_PACK_END</property>
537 </packing>
538 </child>
539 </widget>
540 </child>
541 </widget>
542 <widget class="GtkWindow" id="main_window">
543 <child>
544 <widget class="GtkVBox" id="main_window_vbox">
545 <property name="visible">True</property>
546 <child>
547 <widget class="GtkToolbar" id="main_toolbar">
548 <property name="visible">True</property>
549 <child>
550 <widget class="GtkToolButton" id="main_toolbutton_build">
551 <property name="visible">True</property>
552 <property name="label" translatable="yes">Build</property>
553 <property name="stock_id">gtk-execute</property>
554 </widget>
555 <packing>
556 <property name="expand">False</property>
557 </packing>
558 </child>
559 </widget>
560 <packing>
561 <property name="expand">False</property>
562 </packing>
563 </child>
564 <child>
565 <widget class="GtkVPaned" id="vpaned1">
566 <property name="visible">True</property>
567 <property name="can_focus">True</property>
568 <child>
569 <widget class="GtkScrolledWindow" id="results_scrolledwindow">
570 <property name="visible">True</property>
571 <property name="can_focus">True</property>
572 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
573 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
574 <child>
575 <placeholder/>
576 </child>
577 </widget>
578 <packing>
579 <property name="resize">False</property>
580 <property name="shrink">True</property>
581 </packing>
582 </child>
583 <child>
584 <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
585 <property name="visible">True</property>
586 <property name="can_focus">True</property>
587 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
588 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
589 <child>
590 <placeholder/>
591 </child>
592 </widget>
593 <packing>
594 <property name="resize">True</property>
595 <property name="shrink">True</property>
596 </packing>
597 </child>
598 </widget>
599 <packing>
600 <property name="position">1</property>
601 </packing>
602 </child>
603 </widget>
604 </child>
605 </widget>
606</glade-interface>
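
The glade file above defines the older Puccho build dialogs and main window rather than the Hob pages in this patch. Purely as an illustration of how such a UI definition can be instantiated with PyGTK's libglade bindings (a hedged sketch; the handler and the way the file is loaded here are assumptions, not patch behaviour):

import gtk
import gtk.glade

ui = gtk.glade.XML("bitbake/lib/bb/ui/crumbs/puccho.glade", root="main_window")
main_window = ui.get_widget("main_window")
build_button = ui.get_widget("main_toolbutton_build")
build_button.connect("clicked", lambda button: gtk.main_quit())  # placeholder handler
main_window.show_all()
gtk.main()
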
diff --git a/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py b/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py
new file mode 100755
index 0000000000..58db43f706
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py
@@ -0,0 +1,335 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
27from bb.ui.crumbs.hoblistmodel import RecipeListModel
28from bb.ui.crumbs.hobpages import HobPage
29
30#
31# RecipeSelectionPage
32#
33class RecipeSelectionPage (HobPage):
34 pages = [
35 {
36 'name' : 'Included recipes',
37 'tooltip' : 'The recipes currently included for your image',
38 'filter' : { RecipeListModel.COL_INC : [True],
39 RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] },
40 'search' : 'Search recipes by name',
41 'searchtip' : 'Enter a recipe name to find it',
42 'columns' : [{
43 'col_name' : 'Recipe name',
44 'col_id' : RecipeListModel.COL_NAME,
45 'col_style': 'text',
46 'col_min' : 100,
47 'col_max' : 400,
48 'expand' : 'True'
49 }, {
50 'col_name' : 'Group',
51 'col_id' : RecipeListModel.COL_GROUP,
52 'col_style': 'text',
53 'col_min' : 100,
54 'col_max' : 300,
55 'expand' : 'True'
56 }, {
57 'col_name' : 'Brought in by (+others)',
58 'col_id' : RecipeListModel.COL_BINB,
59 'col_style': 'binb',
60 'col_min' : 100,
61 'col_max' : 500,
62 'expand' : 'True'
63 }, {
64 'col_name' : 'Included',
65 'col_id' : RecipeListModel.COL_INC,
66 'col_style': 'check toggle',
67 'col_min' : 100,
68 'col_max' : 100
69 }]
70 }, {
71 'name' : 'All recipes',
72 'tooltip' : 'All recipes in your configured layers',
73 'filter' : { RecipeListModel.COL_TYPE : ['recipe'] },
74 'search' : 'Search recipes by name',
75 'searchtip' : 'Enter a recipe name to find it',
76 'columns' : [{
77 'col_name' : 'Recipe name',
78 'col_id' : RecipeListModel.COL_NAME,
79 'col_style': 'text',
80 'col_min' : 100,
81 'col_max' : 400,
82 'expand' : 'True'
83 }, {
84 'col_name' : 'Group',
85 'col_id' : RecipeListModel.COL_GROUP,
86 'col_style': 'text',
87 'col_min' : 100,
88 'col_max' : 400,
89 'expand' : 'True'
90 }, {
91 'col_name' : 'License',
92 'col_id' : RecipeListModel.COL_LIC,
93 'col_style': 'text',
94 'col_min' : 100,
95 'col_max' : 400,
96 'expand' : 'True'
97 }, {
98 'col_name' : 'Included',
99 'col_id' : RecipeListModel.COL_INC,
100 'col_style': 'check toggle',
101 'col_min' : 100,
102 'col_max' : 100
103 }]
104 }, {
105 'name' : 'Package Groups',
106 'tooltip' : 'All package groups in your configured layers',
107 'filter' : { RecipeListModel.COL_TYPE : ['packagegroup'] },
108 'search' : 'Search package groups by name',
109 'searchtip' : 'Enter a package group name to find it',
110 'columns' : [{
111 'col_name' : 'Package group name',
112 'col_id' : RecipeListModel.COL_NAME,
113 'col_style': 'text',
114 'col_min' : 100,
115 'col_max' : 400,
116 'expand' : 'True'
117 }, {
118 'col_name' : 'Included',
119 'col_id' : RecipeListModel.COL_INC,
120 'col_style': 'check toggle',
121 'col_min' : 100,
122 'col_max' : 100
123 }]
124 }
125 ]
126
127 (INCLUDED,
128 ALL,
129 TASKS) = range(3)
130
131 def __init__(self, builder = None):
132 super(RecipeSelectionPage, self).__init__(builder, "Step 1 of 2: Edit recipes")
133
134 # set invisible members
135 self.recipe_model = self.builder.recipe_model
136
137 # create visual elements
138 self.create_visual_elements()
139
140 def included_clicked_cb(self, button):
141 self.ins.set_current_page(self.INCLUDED)
142
143 def create_visual_elements(self):
144 self.eventbox = self.add_onto_top_bar(None, 73)
145 self.pack_start(self.eventbox, expand=False, fill=False)
146 self.pack_start(self.group_align, expand=True, fill=True)
147
148 # set visible members
149 self.ins = HobNotebook()
150        self.tables = [] # we need to modify the tables when the dialog is shown
151
152 search_names = []
153 search_tips = []
154 # append the tabs in order
155 for page in self.pages:
156 columns = page['columns']
157 name = page['name']
158 tab = HobViewTable(columns, name)
159 search_names.append(page['search'])
160 search_tips.append(page['searchtip'])
161 filter = page['filter']
162 sort_model = self.recipe_model.tree_model(filter, initial=True)
163 tab.set_model(sort_model)
164 tab.connect("toggled", self.table_toggled_cb, name)
165 tab.connect("button-release-event", self.button_click_cb)
166 tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include, filter)
167 self.ins.append_page(tab, page['name'], page['tooltip'])
168 self.tables.append(tab)
169
170 self.ins.set_entry(search_names, search_tips)
171 self.ins.search.connect("changed", self.search_entry_changed)
172
173 # add all into the window
174 self.box_group_area.pack_start(self.ins, expand=True, fill=True)
175
176 button_box = gtk.HBox(False, 6)
177 self.box_group_area.pack_end(button_box, expand=False, fill=False)
178
179 self.build_packages_button = HobButton('Build packages')
180 #self.build_packages_button.set_size_request(205, 49)
181 self.build_packages_button.set_tooltip_text("Build selected recipes into packages")
182 self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
183 self.build_packages_button.grab_default()
184 self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
185 button_box.pack_end(self.build_packages_button, expand=False, fill=False)
186
187 self.back_button = HobAltButton('Cancel')
188 self.back_button.connect("clicked", self.back_button_clicked_cb)
189 button_box.pack_end(self.back_button, expand=False, fill=False)
190
191 def search_entry_changed(self, entry):
192 text = entry.get_text()
193 if self.ins.search_focus:
194 self.ins.search_focus = False
195 elif self.ins.page_changed:
196 self.ins.page_change = False
197 self.filter_search(entry)
198 elif text not in self.ins.search_names:
199 self.filter_search(entry)
200
201 def filter_search(self, entry):
202 text = entry.get_text()
203 current_tab = self.ins.get_current_page()
204 filter = self.pages[current_tab]['filter']
205 filter[RecipeListModel.COL_NAME] = text
206 self.tables[current_tab].set_model(self.recipe_model.tree_model(filter, search_data=text))
207 if self.recipe_model.filtered_nb == 0:
208 if not self.ins.get_nth_page(current_tab).top_bar:
209 self.ins.get_nth_page(current_tab).add_no_result_bar(entry)
210 self.ins.get_nth_page(current_tab).top_bar.set_no_show_all(True)
211 self.ins.get_nth_page(current_tab).top_bar.show()
212 self.ins.get_nth_page(current_tab).scroll.hide()
213 else:
214 if self.ins.get_nth_page(current_tab).top_bar:
215 self.ins.get_nth_page(current_tab).top_bar.hide()
216 self.ins.get_nth_page(current_tab).scroll.show()
217 if entry.get_text() == '':
218 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
219 else:
220 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, True)
221
222 def button_click_cb(self, widget, event):
223 path, col = widget.table_tree.get_cursor()
224 tree_model = widget.table_tree.get_model()
225 if path and col.get_title() != 'Included': # else activation is likely a removal
226 properties = {'summary': '', 'name': '', 'version': '', 'revision': '', 'binb': '', 'group': '', 'license': '', 'homepage': '', 'bugtracker': '', 'description': ''}
227 properties['summary'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_SUMMARY)
228 properties['name'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_NAME)
229 properties['version'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_VERSION)
230 properties['revision'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_REVISION)
231 properties['binb'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BINB)
232 properties['group'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_GROUP)
233 properties['license'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_LIC)
234 properties['homepage'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_HOMEPAGE)
235 properties['bugtracker'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BUGTRACKER)
236 properties['description'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_DESC)
237 self.builder.show_recipe_property_dialog(properties)
238
239 def build_packages_clicked_cb(self, button):
240 self.refresh_tables()
241 self.builder.build_packages()
242
243 def refresh_tables(self):
244 self.ins.reset_entry(self.ins.search, 0)
245 for tab in self.tables:
246 index = self.tables.index(tab)
247 filter = self.pages[index]['filter']
248 tab.set_model(self.recipe_model.tree_model(filter, search_data="", initial=True))
249
250 def back_button_clicked_cb(self, button):
251 self.builder.recipe_model.set_selected_image(self.builder.configuration.initial_selected_image)
252 self.builder.image_configuration_page.update_image_combo(self.builder.recipe_model, self.builder.configuration.initial_selected_image)
253 self.builder.image_configuration_page.update_image_desc()
254 self.builder.show_configuration()
255 self.refresh_tables()
256
257 def refresh_selection(self):
258 self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
259 _, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
260 self.ins.show_indicator_icon("Included recipes", len(self.builder.configuration.selected_recipes))
261
262 def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
263 if not self.recipe_model.path_included(path):
264 self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
265 else:
266 self.pre_fadeout_checkout_include(view_tree, pagename)
267 self.recipe_model.exclude_item(item_path=path)
268 self.render_fadeout(view_tree, cell)
269
270 self.refresh_selection()
271 if not self.builder.customized:
272 self.builder.customized = True
273 self.builder.configuration.selected_image = self.recipe_model.__custom_image__
274 self.builder.rcppkglist_populated()
275
276 self.builder.window_sensitive(True)
277
278 view_model = view_tree.get_model()
279 vpath = self.recipe_model.convert_path_to_vpath(view_model, path)
280 view_tree.set_cursor(vpath)
281
282 def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
283 # Click to include a recipe
284 self.builder.window_sensitive(False)
285 view_model = view_tree.get_model()
286 path = self.recipe_model.convert_vpath_to_path(view_model, view_path)
287 glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
288
289 def pre_fadeout_checkout_include(self, tree, pagename):
290        # after the fadeout the table will be sorted as before
291 self.sort_column_id = self.recipe_model.sort_column_id
292 self.sort_order = self.recipe_model.sort_order
293
294        # resync the included items to a backup fade include column
295 it = self.recipe_model.get_iter_first()
296 while it:
297 active = self.recipe_model.get_value(it, self.recipe_model.COL_INC)
298 self.recipe_model.set(it, self.recipe_model.COL_FADE_INC, active)
299 it = self.recipe_model.iter_next(it)
300        # Check out a model based on the COL_FADE_INC column, which
301        # saves the previous state of COL_INC before exclude_item runs
302 filter = { RecipeListModel.COL_FADE_INC:[True] }
303 if pagename == "Included recipes":
304 filter[RecipeListModel.COL_TYPE] = ['recipe', 'packagegroup']
305 elif pagename == "All recipes":
306 filter[RecipeListModel.COL_TYPE] = ['recipe']
307 else:
308 filter[RecipeListModel.COL_TYPE] = ['packagegroup']
309
310 new_model = self.recipe_model.tree_model(filter, excluded_items_ahead=True)
311 tree.set_model(new_model)
312
313 def render_fadeout(self, tree, cell):
314 if (not cell) or (not tree):
315 return
316 to_render_cells = []
317 model = tree.get_model()
318 it = model.get_iter_first()
319 while it:
320 path = model.get_path(it)
321 prev_cell_is_active = model.get_value(it, RecipeListModel.COL_FADE_INC)
322 curr_cell_is_active = model.get_value(it, RecipeListModel.COL_INC)
323 if (prev_cell_is_active == True) and (curr_cell_is_active == False):
324 to_render_cells.append(path)
325 it = model.iter_next(it)
326
327 cell.fadeout(tree, 1000, to_render_cells)
328
329 def after_fadeout_checkin_include(self, table, ctrl, cell, tree, filter):
330 self.recipe_model.sort_column_id = self.sort_column_id
331 self.recipe_model.sort_order = self.sort_order
332 tree.set_model(self.recipe_model.tree_model(filter))
333
334 def set_recipe_curr_tab(self, curr_page):
335 self.ins.set_current_page(curr_page)
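
The fade-out handling above works by backing up the inclusion column before the exclusion happens and then filtering on the backup. Stripped of the TreeModel machinery, the idea behind pre_fadeout_checkout_include() and render_fadeout() reduces to this (a standalone sketch with made-up rows, not patch code):

rows = [{'name': 'busybox', 'inc': True},
        {'name': 'dropbear', 'inc': True},
        {'name': 'zlib', 'inc': False}]

for row in rows:
    row['fade_inc'] = row['inc']   # snapshot the current inclusion state

rows[1]['inc'] = False             # the user excludes one recipe

# rows to fade out: included before, no longer included now
to_fade = [r['name'] for r in rows if r['fade_inc'] and not r['inc']]
print(to_fade)  # ['dropbear']
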
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
new file mode 100644
index 0000000000..16a955d2b1
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py
@@ -0,0 +1,551 @@
1
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2008 Intel Corporation
6#
7# Authored by Rob Bradford <rob@linux.intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gtk
23import gobject
24import logging
25import time
26import urllib
27import urllib2
28import pango
29from bb.ui.crumbs.hobcolor import HobColors
30from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
31
32class RunningBuildModel (gtk.TreeStore):
33 (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
34
35 def __init__ (self):
36 gtk.TreeStore.__init__ (self,
37 gobject.TYPE_STRING,
38 gobject.TYPE_STRING,
39 gobject.TYPE_STRING,
40 gobject.TYPE_STRING,
41 gobject.TYPE_STRING,
42 gobject.TYPE_STRING,
43 gobject.TYPE_INT)
44
45 def failure_model_filter(self, model, it):
46 color = model.get(it, self.COL_COLOR)[0]
47 if not color:
48 return False
49 if color == HobColors.ERROR or color == HobColors.WARNING:
50 return True
51 return False
52
53 def failure_model(self):
54 model = self.filter_new()
55 model.set_visible_func(self.failure_model_filter)
56 return model
57
58 def foreach_cell_func(self, model, path, iter, usr_data=None):
59 if model.get_value(iter, self.COL_ICON) == "gtk-execute":
60 model.set(iter, self.COL_ICON, "")
61
62 def close_task_refresh(self):
63 self.foreach(self.foreach_cell_func, None)
64
65class RunningBuild (gobject.GObject):
66 __gsignals__ = {
67 'build-started' : (gobject.SIGNAL_RUN_LAST,
68 gobject.TYPE_NONE,
69 ()),
70 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
71 gobject.TYPE_NONE,
72 ()),
73 'build-failed' : (gobject.SIGNAL_RUN_LAST,
74 gobject.TYPE_NONE,
75 ()),
76 'build-complete' : (gobject.SIGNAL_RUN_LAST,
77 gobject.TYPE_NONE,
78 ()),
79 'build-aborted' : (gobject.SIGNAL_RUN_LAST,
80 gobject.TYPE_NONE,
81 ()),
82 'task-started' : (gobject.SIGNAL_RUN_LAST,
83 gobject.TYPE_NONE,
84 (gobject.TYPE_PYOBJECT,)),
85 'log-error' : (gobject.SIGNAL_RUN_LAST,
86 gobject.TYPE_NONE,
87 ()),
88 'log-warning' : (gobject.SIGNAL_RUN_LAST,
89 gobject.TYPE_NONE,
90 ()),
91 'disk-full' : (gobject.SIGNAL_RUN_LAST,
92 gobject.TYPE_NONE,
93 ()),
94 'no-provider' : (gobject.SIGNAL_RUN_LAST,
95 gobject.TYPE_NONE,
96 (gobject.TYPE_PYOBJECT,)),
97 'log' : (gobject.SIGNAL_RUN_LAST,
98 gobject.TYPE_NONE,
99 (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
100 }
101 pids_to_task = {}
102 tasks_to_iter = {}
103
104 def __init__ (self, sequential=False):
105 gobject.GObject.__init__ (self)
106 self.model = RunningBuildModel()
107 self.sequential = sequential
108 self.buildaborted = False
109
110 def reset (self):
111 self.pids_to_task.clear()
112 self.tasks_to_iter.clear()
113 self.model.clear()
114
115 def handle_event (self, event, pbar=None):
116        # Handle an event from the event queue; this may result in updating
117 # the model and thus the UI. Or it may be to tell us that the build
118 # has finished successfully (or not, as the case may be.)
119
120 parent = None
121 pid = 0
122 package = None
123 task = None
124
125 # If we have a pid attached to this message/event try and get the
126 # (package, task) pair for it. If we get that then get the parent iter
127 # for the message.
128 if hasattr(event, 'pid'):
129 pid = event.pid
130 if hasattr(event, 'process'):
131 pid = event.process
132
133 if pid and pid in self.pids_to_task:
134 (package, task) = self.pids_to_task[pid]
135 parent = self.tasks_to_iter[(package, task)]
136
137 if(isinstance(event, logging.LogRecord)):
138 if event.taskpid == 0 or event.levelno > logging.INFO:
139 self.emit("log", "handle", event)
140 # FIXME: this is a hack! More info in Yocto #1433
141 # http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
142 # mask the error message as it's not informative for the user.
143 if event.msg.startswith("Execution of event handler 'run_buildstats' failed"):
144 return
145
146 if (event.levelno < logging.INFO or
147 event.msg.startswith("Running task")):
148 return # don't add these to the list
149
150 if event.levelno >= logging.ERROR:
151 icon = "dialog-error"
152 color = HobColors.ERROR
153 self.emit("log-error")
154 elif event.levelno >= logging.WARNING:
155 icon = "dialog-warning"
156 color = HobColors.WARNING
157 self.emit("log-warning")
158 else:
159 icon = None
160 color = HobColors.OK
161
162 # if we know which package we belong to, we'll append onto its list.
163 # otherwise, we'll jump to the top of the master list
164 if self.sequential or not parent:
165 tree_add = self.model.append
166 else:
167 tree_add = self.model.prepend
168 tree_add(parent,
169 (None,
170 package,
171 task,
172 event.getMessage(),
173 icon,
174 color,
175 0))
176
177 # if there are warnings while processing a package
178 # (parent), mark the task with warning color;
179 # in case there are errors, the updates will be
180 # handled on TaskFailed.
181 if color == HobColors.WARNING and parent:
182 self.model.set(parent, self.model.COL_COLOR, color)
183                    if task: # then we have a parent (package), so update its color
184 self.model.set(self.tasks_to_iter[(package, None)], self.model.COL_COLOR, color)
185
186 elif isinstance(event, bb.build.TaskStarted):
187 (package, task) = (event._package, event._task)
188
189 # Save out this PID.
190 self.pids_to_task[pid] = (package, task)
191
192 # Check if we already have this package in our model. If so then
193 # that can be the parent for the task. Otherwise we create a new
194 # top level for the package.
195 if ((package, None) in self.tasks_to_iter):
196 parent = self.tasks_to_iter[(package, None)]
197 else:
198 if self.sequential:
199 add = self.model.append
200 else:
201 add = self.model.prepend
202 parent = add(None, (None,
203 package,
204 None,
205 "Package: %s" % (package),
206 None,
207 HobColors.OK,
208 0))
209 self.tasks_to_iter[(package, None)] = parent
210
211 # Because this parent package now has an active child mark it as
212 # such.
213 self.model.set(parent, self.model.COL_ICON, "gtk-execute")
214 parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
215 if parent_color != HobColors.ERROR and parent_color != HobColors.WARNING:
216 self.model.set(parent, self.model.COL_COLOR, HobColors.RUNNING)
217
218 # Add an entry in the model for this task
219 i = self.model.append (parent, (None,
220 package,
221 task,
222 "Task: %s" % (task),
223 "gtk-execute",
224 HobColors.RUNNING,
225 0))
226
227 # update the parent's active task count
228 num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
229 self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
230
231 # Save out the iter so that we can find it when we have a message
232 # that we need to attach to a task.
233 self.tasks_to_iter[(package, task)] = i
234
235 elif isinstance(event, bb.build.TaskBase):
236 self.emit("log", "info", event._message)
237 current = self.tasks_to_iter[(package, task)]
238 parent = self.tasks_to_iter[(package, None)]
239
240 # remove this task from the parent's active count
241 num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
242 self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
243
244 if isinstance(event, bb.build.TaskFailed):
245 # Mark the task and parent as failed
246 icon = "dialog-error"
247 color = HobColors.ERROR
248
249 logfile = event.logfile
250 if logfile and os.path.exists(logfile):
251 with open(logfile) as f:
252 logdata = f.read()
253 self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
254
255 for i in (current, parent):
256 self.model.set(i, self.model.COL_ICON, icon,
257 self.model.COL_COLOR, color)
258 else:
259 # Mark the parent package and the task as inactive,
260 # but make sure to preserve error, warnings and active
261 # states
262 parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
263 task_color = self.model.get(current, self.model.COL_COLOR)[0]
264
265 # Mark the task as inactive
266 self.model.set(current, self.model.COL_ICON, None)
267 if task_color != HobColors.ERROR:
268 if task_color == HobColors.WARNING:
269 self.model.set(current, self.model.COL_ICON, 'dialog-warning')
270 else:
271 self.model.set(current, self.model.COL_COLOR, HobColors.OK)
272
273 # Mark the parent as inactive
274 if parent_color != HobColors.ERROR:
275 if parent_color == HobColors.WARNING:
276 self.model.set(parent, self.model.COL_ICON, "dialog-warning")
277 else:
278 self.model.set(parent, self.model.COL_ICON, None)
279 if num_active == 0:
280 self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
281
282            # Clear the iters and the pids, since once the task goes away the
283            # pid will no longer be used for messages
284 del self.tasks_to_iter[(package, task)]
285 del self.pids_to_task[pid]
286
287 elif isinstance(event, bb.event.BuildStarted):
288
289 self.emit("build-started")
290 self.model.prepend(None, (None,
291 None,
292 None,
293 "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
294 None,
295 HobColors.OK,
296 0))
297 if pbar:
298 pbar.update(0, self.progress_total)
299 pbar.set_title(bb.event.getName(event))
300
301 elif isinstance(event, bb.event.BuildCompleted):
302 failures = int (event._failures)
303 self.model.prepend(None, (None,
304 None,
305 None,
306 "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
307 None,
308 HobColors.OK,
309 0))
310
311 # Emit the appropriate signal depending on the number of failures
312 if self.buildaborted:
313 self.emit ("build-aborted")
314 self.buildaborted = False
315 elif (failures >= 1):
316 self.emit ("build-failed")
317 else:
318 self.emit ("build-succeeded")
319 # Emit a generic "build-complete" signal for things wishing to
320 # handle when the build is finished
321 self.emit("build-complete")
322            # reset all the cells' icon indicators
323 self.model.close_task_refresh()
324 if pbar:
325 pbar.set_text(event.msg)
326
327 elif isinstance(event, bb.event.DiskFull):
328 self.buildaborted = True
329 self.emit("disk-full")
330
331 elif isinstance(event, bb.command.CommandFailed):
332 self.emit("log", "error", "Command execution failed: %s" % (event.error))
333 if event.error.startswith("Exited with"):
334 # If the command fails with an exit code we're done, emit the
335 # generic signal for the UI to notify the user
336 self.emit("build-complete")
337                # reset all the cells' icon indicators
338 self.model.close_task_refresh()
339
340 elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
341 pbar.set_title("Loading cache")
342 self.progress_total = event.total
343 pbar.update(0, self.progress_total)
344 elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
345 pbar.update(event.current, self.progress_total)
346 elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
347 pbar.update(self.progress_total, self.progress_total)
348 pbar.hide()
349 elif isinstance(event, bb.event.ParseStarted) and pbar:
350 if event.total == 0:
351 return
352 pbar.set_title("Processing recipes")
353 self.progress_total = event.total
354 pbar.update(0, self.progress_total)
355 elif isinstance(event, bb.event.ParseProgress) and pbar:
356 pbar.update(event.current, self.progress_total)
357 elif isinstance(event, bb.event.ParseCompleted) and pbar:
358 pbar.hide()
359        # use runqueue events as much as possible to update the progress bar
360 elif isinstance(event, bb.runqueue.runQueueTaskFailed):
361 self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
362 elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
363 self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
364 % (event.taskid, event.taskstring, event.exitcode))
365 elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
366 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
367 self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
368 (event.stats.completed + event.stats.active + event.stats.failed + 1,
369 event.stats.total, event.taskstring))
370 else:
371 if event.noexec:
372 tasktype = 'noexec task'
373 else:
374 tasktype = 'task'
375 self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
376 (tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
377 event.stats.total, event.taskid, event.taskstring))
378 message = {}
379 message["eventname"] = bb.event.getName(event)
380 num_of_completed = event.stats.completed + event.stats.failed
381 message["current"] = num_of_completed
382 message["total"] = event.stats.total
383 message["title"] = ""
384 message["task"] = event.taskstring
385 self.emit("task-started", message)
386 elif isinstance(event, bb.event.MultipleProviders):
387 self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
388 % (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
389 self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
390 elif isinstance(event, bb.event.NoProvider):
391 msg = ""
392 if event._runtime:
393 r = "R"
394 else:
395 r = ""
396
397 extra = ''
398 if not event._reasons:
399 if event._close_matches:
400 extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
401
402 if event._dependees:
403 msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s\n" % (r, event._item, ", ".join(event._dependees), r, extra)
404 else:
405 msg = "Nothing %sPROVIDES '%s'%s\n" % (r, event._item, extra)
406 if event._reasons:
407 for reason in event._reasons:
408 msg += ("%s\n" % reason)
409 self.emit("no-provider", msg)
410 self.emit("log", "error", msg)
411 elif isinstance(event, bb.event.LogExecTTY):
412 icon = "dialog-warning"
413 color = HobColors.WARNING
414 if self.sequential or not parent:
415 tree_add = self.model.append
416 else:
417 tree_add = self.model.prepend
418 tree_add(parent,
419 (None,
420 package,
421 task,
422 event.msg,
423 icon,
424 color,
425 0))
426 else:
427 if not isinstance(event, (bb.event.BuildBase,
428 bb.event.StampUpdate,
429 bb.event.ConfigParsed,
430 bb.event.RecipeParsed,
431 bb.event.RecipePreFinalise,
432 bb.runqueue.runQueueEvent,
433 bb.runqueue.runQueueExitWait,
434 bb.event.OperationStarted,
435 bb.event.OperationCompleted,
436 bb.event.OperationProgress)):
437 self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))
438
439 return
440
441
442def do_pastebin(text):
443 url = 'http://pastebin.com/api_public.php'
444 params = {'paste_code': text, 'paste_format': 'text'}
445
446 req = urllib2.Request(url, urllib.urlencode(params))
447 response = urllib2.urlopen(req)
448 paste_url = response.read()
449
450 return paste_url
451
452
453class RunningBuildTreeView (gtk.TreeView):
454 __gsignals__ = {
455 "button_press_event" : "override"
456 }
457 def __init__ (self, readonly=False, hob=False):
458 gtk.TreeView.__init__ (self)
459 self.readonly = readonly
460
461 # The icon that indicates whether we're building or failed.
462        # the 'hob' flag is needed because this code is shared by more than just Hob
463 if hob:
464 renderer = HobCellRendererPixbuf ()
465 else:
466 renderer = gtk.CellRendererPixbuf()
467 col = gtk.TreeViewColumn ("Status", renderer)
468 col.add_attribute (renderer, "icon-name", 4)
469 self.append_column (col)
470
471 # The message of the build.
472        # the 'hob' flag is needed because this code is shared by more than just Hob
473 if hob:
474 self.message_renderer = HobWarpCellRendererText (col_number=1)
475 else:
476 self.message_renderer = gtk.CellRendererText ()
477 self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
478 self.message_column.add_attribute(self.message_renderer, 'background', 5)
479 self.message_renderer.set_property('editable', (not self.readonly))
480 self.append_column (self.message_column)
481
482 def do_button_press_event(self, event):
483 gtk.TreeView.do_button_press_event(self, event)
484
485 if event.button == 3:
486 selection = super(RunningBuildTreeView, self).get_selection()
487 (model, it) = selection.get_selected()
488 if it is not None:
489 can_paste = model.get(it, model.COL_LOG)[0]
490 if can_paste == 'pastebin':
491 # build a simple menu with a pastebin option
492 menu = gtk.Menu()
493 menuitem = gtk.MenuItem("Copy")
494 menu.append(menuitem)
495 menuitem.connect("activate", self.clipboard_handler, (model, it))
496 menuitem.show()
497 menuitem = gtk.MenuItem("Send log to pastebin")
498 menu.append(menuitem)
499 menuitem.connect("activate", self.pastebin_handler, (model, it))
500 menuitem.show()
501 menu.show()
502 menu.popup(None, None, None, event.button, event.time)
503
504 def _add_to_clipboard(self, clipping):
505 """
506 Add the contents of clipping to the system clipboard.
507 """
508 clipboard = gtk.clipboard_get()
509 clipboard.set_text(clipping)
510 clipboard.store()
511
512 def pastebin_handler(self, widget, data):
513 """
514 Send the log data to pastebin, then add the new paste url to the
515 clipboard.
516 """
517 (model, it) = data
518 paste_url = do_pastebin(model.get(it, model.COL_MESSAGE)[0])
519
520 # @todo Provide visual feedback to the user that it is done and that
521 # it worked.
522 print paste_url
523
524 self._add_to_clipboard(paste_url)
525
526 def clipboard_handler(self, widget, data):
527        """Copy the message of the selected row to the system clipboard."""
528        # (used by the right-click "Copy" menu item above)
529 (model, it) = data
530 message = model.get(it, model.COL_MESSAGE)[0]
531
532 self._add_to_clipboard(message)
533
534class BuildFailureTreeView(gtk.TreeView):
535
536 def __init__ (self):
537 gtk.TreeView.__init__(self)
538 self.set_rules_hint(False)
539 self.set_headers_visible(False)
540 self.get_selection().set_mode(gtk.SELECTION_SINGLE)
541
542 # The icon that indicates whether we're building or failed.
543 renderer = HobCellRendererPixbuf ()
544 col = gtk.TreeViewColumn ("Status", renderer)
545 col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
546 self.append_column (col)
547
548 # The message of the build.
549 self.message_renderer = HobWarpCellRendererText (col_number=1)
550 self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
551 self.append_column (self.message_column)
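
The handler above never touches the surrounding GTK widgets directly; it reports progress purely through GObject signals such as "build-started", "build-succeeded", "build-failed", "build-aborted", "build-complete", "task-started", "disk-full", "no-provider" and "log". A minimal consumer might look like the sketch below (illustrative only, not part of the patch; it assumes RunningBuild and RunningBuildTreeView are importable from bb.ui.crumbs.runningbuild, as goggle.py further down in this patch does):

    # Hedged sketch: wiring callbacks to the signals emitted by the
    # RunningBuild event handler shown above.
    import gtk
    from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView

    def on_build_succeeded(running_build):
        print("Build succeeded")

    def on_build_failed(running_build):
        print("Build failed")

    running_build = RunningBuild()
    running_build.connect("build-succeeded", on_build_succeeded)
    running_build.connect("build-failed", on_build_failed)

    window = gtk.Window()
    tree = RunningBuildTreeView()
    tree.set_model(running_build.model)
    window.add(tree)
    window.show_all()
    # Server events would then be fed in with, e.g.:
    #     running_build.handle_event(event, pbar)
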
diff --git a/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py b/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py
new file mode 100644
index 0000000000..76ce2ecc23
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py
@@ -0,0 +1,85 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Bogdan Marinescu <bogdan.a.marinescu@intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gtk, gobject
23from bb.ui.crumbs.progressbar import HobProgressBar
24from bb.ui.crumbs.hobwidget import hic
25from bb.ui.crumbs.hobpages import HobPage
26
27#
28# SanityCheckPage
29#
30class SanityCheckPage (HobPage):
31
32 def __init__(self, builder):
33 super(SanityCheckPage, self).__init__(builder)
34 self.running = False
35 self.create_visual_elements()
36 self.show_all()
37
38 def make_label(self, text, bold=True):
39 label = gtk.Label()
40 label.set_alignment(0.0, 0.5)
41 mark = "<span %s>%s</span>" % (self.span_tag('x-large', 'bold') if bold else self.span_tag('medium'), text)
42 label.set_markup(mark)
43 return label
44
45 def start(self):
46 if not self.running:
47 self.running = True
48 gobject.timeout_add(100, self.timer_func)
49
50 def stop(self):
51 self.running = False
52
53 def is_running(self):
54 return self.running
55
56 def timer_func(self):
57 self.progress_bar.pulse()
58 return self.running
59
60 def create_visual_elements(self):
61        # Table-based layout. 'rows' and 'cols' give the table size
62 rows, cols = 30, 50
63 self.table = gtk.Table(rows, cols, True)
64 self.pack_start(self.table, expand=False, fill=False)
65 sx, sy = 2, 2
66 # 'info' icon
67 image = gtk.Image()
68 image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
69 self.table.attach(image, sx, sx + 2, sy, sy + 3 )
70 image.show()
71 # 'Checking' message
72 label = self.make_label('Hob is checking for correct build system setup')
73 self.table.attach(label, sx + 2, cols, sy, sy + 3, xpadding=5 )
74 label.show()
75 # 'Shouldn't take long' message.
76 label = self.make_label("The check shouldn't take long.", False)
77 self.table.attach(label, sx + 2, cols, sy + 3, sy + 4, xpadding=5)
78 label.show()
79 # Progress bar
80 self.progress_bar = HobProgressBar()
81 self.table.attach(self.progress_bar, sx + 2, cols - 3, sy + 5, sy + 7, xpadding=5)
82 self.progress_bar.show()
83 # All done
84 self.table.show()
85
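
SanityCheckPage above animates its progress bar with a 100 ms gobject timeout: timer_func keeps getting called for as long as it returns True, and stop() ends the animation simply by flipping self.running. The same pulse pattern, stripped of the Hob-specific widgets, looks roughly like this (a sketch only, assuming a plain gtk.ProgressBar is acceptable in place of HobProgressBar):

    import gtk
    import gobject

    class PulseIndicator(object):
        def __init__(self):
            self.running = False
            self.bar = gtk.ProgressBar()

        def _tick(self):
            self.bar.pulse()
            return self.running    # returning False removes the timeout

        def start(self):
            if not self.running:
                self.running = True
                gobject.timeout_add(100, self._tick)

        def stop(self):
            self.running = False
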
diff --git a/bitbake/lib/bb/ui/crumbs/utils.py b/bitbake/lib/bb/ui/crumbs/utils.py
new file mode 100644
index 0000000000..939864fa6f
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/utils.py
@@ -0,0 +1,34 @@
1#
2# BitBake UI Utils
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19# This utility method looks for xterm or vte and returns the
20# first one that exists. Currently we are keeping this simple, but
21# we will likely move the oe.terminal implementation into
22# bitbake, which will allow more flexibility.
23
24import os
25import bb
26
27def which_terminal():
28 term = bb.utils.which(os.environ["PATH"], "xterm")
29 if term:
30 return term + " -e "
31 term = bb.utils.which(os.environ["PATH"], "vte")
32 if term:
33 return term + " -c "
34 return None
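
The string returned by which_terminal() already ends in the terminal's "run a command" switch, so callers append the command line and spawn the result themselves. A hedged usage sketch (the "bash" payload is just an example, and the subprocess call is an assumption, not something this file does):

    import subprocess
    from bb.ui.crumbs.utils import which_terminal

    term = which_terminal()
    if term:
        # e.g. "xterm -e bash" or "vte -c bash"
        subprocess.Popen(term + "bash", shell=True)
    else:
        print("No suitable terminal (xterm or vte) found in PATH")
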
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
new file mode 100644
index 0000000000..4578dce615
--- /dev/null
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -0,0 +1,326 @@
1#
2# BitBake Graphical GTK based Dependency Explorer
3#
4# Copyright (C) 2007 Ross Burton
5# Copyright (C) 2007 - 2008 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import gobject
21import gtk
22import Queue
23import threading
24import xmlrpclib
25import bb
26import bb.event
27from bb.ui.crumbs.progressbar import HobProgressBar
28
29# Package Model
30(COL_PKG_NAME) = (0)
31
32# Dependency Model
33(TYPE_DEP, TYPE_RDEP) = (0, 1)
34(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
35
36
37class PackageDepView(gtk.TreeView):
38 def __init__(self, model, dep_type, label):
39 gtk.TreeView.__init__(self)
40 self.current = None
41 self.dep_type = dep_type
42 self.filter_model = model.filter_new()
43 self.filter_model.set_visible_func(self._filter)
44 self.set_model(self.filter_model)
45 #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
46 self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
47
48 def _filter(self, model, iter):
49 (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
50 if this_type != self.dep_type: return False
51 return package == self.current
52
53 def set_current_package(self, package):
54 self.current = package
55 self.filter_model.refilter()
56
57
58class PackageReverseDepView(gtk.TreeView):
59 def __init__(self, model, label):
60 gtk.TreeView.__init__(self)
61 self.current = None
62 self.filter_model = model.filter_new()
63 self.filter_model.set_visible_func(self._filter)
64 self.set_model(self.filter_model)
65 self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
66
67 def _filter(self, model, iter):
68 package = model.get_value(iter, COL_DEP_PACKAGE)
69 return package == self.current
70
71 def set_current_package(self, package):
72 self.current = package
73 self.filter_model.refilter()
74
75
76class DepExplorer(gtk.Window):
77 def __init__(self):
78 gtk.Window.__init__(self)
79 self.set_title("Dependency Explorer")
80 self.set_default_size(500, 500)
81 self.connect("delete-event", gtk.main_quit)
82
83 # Create the data models
84 self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
85 self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
86 self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
87 self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
88
89 pane = gtk.HPaned()
90 pane.set_position(250)
91 self.add(pane)
92
93 # The master list of packages
94 scrolled = gtk.ScrolledWindow()
95 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
96 scrolled.set_shadow_type(gtk.SHADOW_IN)
97
98 self.pkg_treeview = gtk.TreeView(self.pkg_model)
99 self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
100 column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
101 self.pkg_treeview.append_column(column)
102 pane.add1(scrolled)
103 scrolled.add(self.pkg_treeview)
104
105 box = gtk.VBox(homogeneous=True, spacing=4)
106
107 # Runtime Depends
108 scrolled = gtk.ScrolledWindow()
109 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
110 scrolled.set_shadow_type(gtk.SHADOW_IN)
111 self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
112 self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
113 scrolled.add(self.rdep_treeview)
114 box.add(scrolled)
115
116 # Build Depends
117 scrolled = gtk.ScrolledWindow()
118 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
119 scrolled.set_shadow_type(gtk.SHADOW_IN)
120 self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
121 self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
122 scrolled.add(self.dep_treeview)
123 box.add(scrolled)
124 pane.add2(box)
125
126 # Reverse Depends
127 scrolled = gtk.ScrolledWindow()
128 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
129 scrolled.set_shadow_type(gtk.SHADOW_IN)
130 self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
131 self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
132 scrolled.add(self.revdep_treeview)
133 box.add(scrolled)
134 pane.add2(box)
135
136 self.show_all()
137
138 def on_package_activated(self, treeview, path, column, data_col):
139 model = treeview.get_model()
140 package = model.get_value(model.get_iter(path), data_col)
141
142 pkg_path = []
143 def finder(model, path, iter, needle):
144 package = model.get_value(iter, COL_PKG_NAME)
145 if package == needle:
146 pkg_path.append(path)
147 return True
148 else:
149 return False
150 self.pkg_model.foreach(finder, package)
151 if pkg_path:
152 self.pkg_treeview.get_selection().select_path(pkg_path[0])
153 self.pkg_treeview.scroll_to_cell(pkg_path[0])
154
155 def on_cursor_changed(self, selection):
156 (model, it) = selection.get_selected()
157 if it is None:
158 current_package = None
159 else:
160 current_package = model.get_value(it, COL_PKG_NAME)
161 self.rdep_treeview.set_current_package(current_package)
162 self.dep_treeview.set_current_package(current_package)
163 self.revdep_treeview.set_current_package(current_package)
164
165
166 def parse(self, depgraph):
167 for package in depgraph["pn"]:
168 self.pkg_model.insert(0, (package,))
169
170 for package in depgraph["depends"]:
171 for depend in depgraph["depends"][package]:
172 self.depends_model.insert (0, (TYPE_DEP, package, depend))
173
174 for package in depgraph["rdepends-pn"]:
175 for rdepend in depgraph["rdepends-pn"][package]:
176 self.depends_model.insert (0, (TYPE_RDEP, package, rdepend))
177
178
179class gtkthread(threading.Thread):
180 quit = threading.Event()
181 def __init__(self, shutdown):
182 threading.Thread.__init__(self)
183 self.setDaemon(True)
184 self.shutdown = shutdown
185
186 def run(self):
187 gobject.threads_init()
188 gtk.gdk.threads_init()
189 gtk.main()
190 gtkthread.quit.set()
191
192
193def main(server, eventHandler, params):
194 try:
195 params.updateFromServer(server)
196 cmdline = params.parseActions()
197 if not cmdline:
198 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
199 return 1
200 if 'msg' in cmdline and cmdline['msg']:
201 logger.error(cmdline['msg'])
202 return 1
203 cmdline = cmdline['action']
204 if not cmdline or cmdline[0] != "generateDotGraph":
205 print("This UI requires the -g option")
206 return 1
207 ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
208 if error:
209 print("Error running command '%s': %s" % (cmdline, error))
210 return 1
211 elif ret != True:
212 print("Error running command '%s': returned %s" % (cmdline, ret))
213 return 1
214 except xmlrpclib.Fault as x:
215 print("XMLRPC Fault getting commandline:\n %s" % x)
216 return
217
218 shutdown = 0
219
220 gtkgui = gtkthread(shutdown)
221 gtkgui.start()
222
223 gtk.gdk.threads_enter()
224 dep = DepExplorer()
225 bardialog = gtk.Dialog(parent=dep,
226 flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
227 bardialog.set_default_size(400, 50)
228 pbar = HobProgressBar()
229 bardialog.vbox.pack_start(pbar)
230 bardialog.show_all()
231 bardialog.connect("delete-event", gtk.main_quit)
232 gtk.gdk.threads_leave()
233
234 progress_total = 0
235 while True:
236 try:
237 event = eventHandler.waitEvent(0.25)
238 if gtkthread.quit.isSet():
239 _, error = server.runCommand(["stateStop"])
240 if error:
241 print('Unable to cleanly stop: %s' % error)
242 break
243
244 if event is None:
245 continue
246
247 if isinstance(event, bb.event.CacheLoadStarted):
248 progress_total = event.total
249 gtk.gdk.threads_enter()
250 bardialog.set_title("Loading Cache")
251 pbar.update(0)
252 gtk.gdk.threads_leave()
253
254 if isinstance(event, bb.event.CacheLoadProgress):
255 x = event.current
256 gtk.gdk.threads_enter()
257 pbar.update(x * 1.0 / progress_total)
258 pbar.set_title('')
259 gtk.gdk.threads_leave()
260 continue
261
262 if isinstance(event, bb.event.CacheLoadCompleted):
263 bardialog.hide()
264 continue
265
266 if isinstance(event, bb.event.ParseStarted):
267 progress_total = event.total
268 if progress_total == 0:
269 continue
270 gtk.gdk.threads_enter()
271 pbar.update(0)
272 bardialog.set_title("Processing recipes")
273
274 gtk.gdk.threads_leave()
275
276 if isinstance(event, bb.event.ParseProgress):
277 x = event.current
278 gtk.gdk.threads_enter()
279 pbar.update(x * 1.0 / progress_total)
280 pbar.set_title('')
281 gtk.gdk.threads_leave()
282 continue
283
284 if isinstance(event, bb.event.ParseCompleted):
285 bardialog.hide()
286 continue
287
288 if isinstance(event, bb.event.DepTreeGenerated):
289 gtk.gdk.threads_enter()
290 dep.parse(event._depgraph)
291 gtk.gdk.threads_leave()
292
293 if isinstance(event, bb.command.CommandCompleted):
294 continue
295
296 if isinstance(event, bb.command.CommandFailed):
297 print("Command execution failed: %s" % event.error)
298 return event.exitcode
299
300 if isinstance(event, bb.command.CommandExit):
301 return event.exitcode
302
303 if isinstance(event, bb.cooker.CookerExit):
304 break
305
306 continue
307 except EnvironmentError as ioerror:
308 # ignore interrupted io
309 if ioerror.args[0] == 4:
310 pass
311 except KeyboardInterrupt:
312 if shutdown == 2:
313 print("\nThird Keyboard Interrupt, exit.\n")
314 break
315 if shutdown == 1:
316 print("\nSecond Keyboard Interrupt, stopping...\n")
317 _, error = server.runCommand(["stateForceShutdown"])
318 if error:
319 print('Unable to cleanly stop: %s' % error)
320 if shutdown == 0:
321 print("\nKeyboard Interrupt, closing down...\n")
322 _, error = server.runCommand(["stateShutdown"])
323 if error:
324 print('Unable to cleanly shutdown: %s' % error)
325 shutdown = shutdown + 1
326 pass
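
DepExplorer.parse() only looks at the "pn", "depends" and "rdepends-pn" keys of the depgraph it is handed (normally the payload of a bb.event.DepTreeGenerated event). A minimal, hand-built example of that shape — the recipe names below are hypothetical and purely illustrative:

    depgraph = {
        "pn": {"busybox": {}, "ncurses": {}},
        "depends": {"busybox": ["virtual/libc"]},
        "rdepends-pn": {"busybox": ["busybox-syslog"]},
    }

    explorer = DepExplorer()
    explorer.parse(depgraph)
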
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
new file mode 100644
index 0000000000..f4ee7b41ae
--- /dev/null
+++ b/bitbake/lib/bb/ui/goggle.py
@@ -0,0 +1,121 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gobject
22import gtk
23import xmlrpclib
24from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
25from bb.ui.crumbs.progress import ProgressBar
26
27import Queue
28
29
30def event_handle_idle_func (eventHandler, build, pbar):
31
32 # Consume as many messages as we can in the time available to us
33 event = eventHandler.getEvent()
34 while event:
35 build.handle_event (event, pbar)
36 event = eventHandler.getEvent()
37
38 return True
39
40def scroll_tv_cb (model, path, iter, view):
41 view.scroll_to_cell (path)
42
43
44# @todo hook these into the GUI so the user has feedback...
45def running_build_failed_cb (running_build):
46 pass
47
48
49def running_build_succeeded_cb (running_build):
50 pass
51
52
53class MainWindow (gtk.Window):
54 def __init__ (self):
55 gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
56
57 # Setup tree view and the scrolled window
58 scrolled_window = gtk.ScrolledWindow ()
59 self.add (scrolled_window)
60 self.cur_build_tv = RunningBuildTreeView()
61 self.connect("delete-event", gtk.main_quit)
62 self.set_default_size(640, 480)
63 scrolled_window.add (self.cur_build_tv)
64
65
66def main (server, eventHandler, params):
67 gobject.threads_init()
68 gtk.gdk.threads_init()
69
70 window = MainWindow ()
71 window.show_all ()
72 pbar = ProgressBar(window)
73 pbar.connect("delete-event", gtk.main_quit)
74
75 # Create the object for the current build
76 running_build = RunningBuild ()
77 window.cur_build_tv.set_model (running_build.model)
78 running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
79 running_build.connect ("build-succeeded", running_build_succeeded_cb)
80 running_build.connect ("build-failed", running_build_failed_cb)
81
82 try:
83 params.updateFromServer(server)
84 cmdline = params.parseActions()
85 if not cmdline:
86 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
87 return 1
88 if 'msg' in cmdline and cmdline['msg']:
89 logger.error(cmdline['msg'])
90 return 1
91 cmdline = cmdline['action']
92 ret, error = server.runCommand(cmdline)
93 if error:
94 print("Error running command '%s': %s" % (cmdline, error))
95 return 1
96 elif ret != True:
97 print("Error running command '%s': returned %s" % (cmdline, ret))
98 return 1
99 except xmlrpclib.Fault as x:
100 print("XMLRPC Fault getting commandline:\n %s" % x)
101 return 1
102
103 # Use a timeout function for probing the event queue to find out if we
104 # have a message waiting for us.
105 gobject.timeout_add (100,
106 event_handle_idle_func,
107 eventHandler,
108 running_build,
109 pbar)
110
111 try:
112 gtk.main()
113 except EnvironmentError as ioerror:
114 # ignore interrupted io
115 if ioerror.args[0] == 4:
116 pass
117 except KeyboardInterrupt:
118 pass
119 finally:
120 server.runCommand(["stateForceShutdown"])
121
diff --git a/bitbake/lib/bb/ui/hob.py b/bitbake/lib/bb/ui/hob.py
new file mode 100755
index 0000000000..da5b411891
--- /dev/null
+++ b/bitbake/lib/bb/ui/hob.py
@@ -0,0 +1,109 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2011 Intel Corporation
6#
7# Authored by Joshua Lock <josh@linux.intel.com>
8# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import sys
24import os
25requirements = "FATAL: Hob requires Gtk+ 2.20.0 or higher, PyGtk 2.21.0 or higher"
26try:
27 import gobject
28 import gtk
29 import pygtk
30 pygtk.require('2.0') # to be certain we don't have gtk+ 1.x !?!
31 gtkver = gtk.gtk_version
32 pygtkver = gtk.pygtk_version
33 if gtkver < (2, 20, 0) or pygtkver < (2, 21, 0):
34 sys.exit("%s,\nYou have Gtk+ %s and PyGtk %s." % (requirements,
35 ".".join(map(str, gtkver)),
36 ".".join(map(str, pygtkver))))
37except ImportError as exc:
38 sys.exit("%s (%s)." % (requirements, str(exc)))
39sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
40try:
41 import bb
42except RuntimeError as exc:
43 sys.exit(str(exc))
44from bb.ui import uihelper
45from bb.ui.crumbs.hoblistmodel import RecipeListModel, PackageListModel
46from bb.ui.crumbs.hobeventhandler import HobHandler
47from bb.ui.crumbs.builder import Builder
48
49featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
50
51def event_handle_idle_func(eventHandler, hobHandler):
52 # Consume as many messages as we can in the time available to us
53 if not eventHandler:
54 return False
55 event = eventHandler.getEvent()
56 while event:
57 hobHandler.handle_event(event)
58 event = eventHandler.getEvent()
59 return True
60
61_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
62 "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
63 "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
64 "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
65 "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
66 "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
67 "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
68 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
69 "bb.event.SanityCheckPassed", "bb.event.SanityCheckFailed", "bb.event.PackageInfo",
70 "bb.event.TargetsTreeGenerated", "bb.event.ConfigFilesFound", "bb.event.ConfigFilePathFound",
71 "bb.event.FilesMatchingFound", "bb.event.NetworkTestFailed", "bb.event.NetworkTestPassed",
72 "bb.event.BuildStarted", "bb.event.BuildCompleted", "bb.event.DiskFull"]
73
74def main (server, eventHandler, params):
75 params.updateFromServer(server)
76 gobject.threads_init()
77
78    # Set up the data models that the event handler and the GUI share:
79    # the recipe model and the package model
80    # (see RecipeListModel and PackageListModel imported above)
81 recipe_model = RecipeListModel()
82 package_model = PackageListModel()
83
84 llevel, debug_domains = bb.msg.constructLogOptions()
85 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
86 hobHandler = HobHandler(server, recipe_model, package_model)
87 builder = Builder(hobHandler, recipe_model, package_model)
88
89 # This timeout function regularly probes the event queue to find out if we
90 # have any messages waiting for us.
91 gobject.timeout_add(10, event_handle_idle_func, eventHandler, hobHandler)
92
93 try:
94 gtk.main()
95 except EnvironmentError as ioerror:
96 # ignore interrupted io
97 if ioerror.args[0] == 4:
98 pass
99 finally:
100 hobHandler.cancel_build(force = True)
101
102if __name__ == "__main__":
103 try:
104 ret = main()
105 except Exception:
106 ret = 1
107 import traceback
108 traceback.print_exc(15)
109 sys.exit(ret)
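
Like knotty.py later in this patch, hob.py registers a server-side event mask: only the event classes listed in _evt_list are delivered to the UI at all. A smaller observer UI could register a trimmed-down mask in the same way (a sketch only; the event subset chosen here is arbitrary):

    import bb.msg

    _my_evt_list = ["logging.LogRecord",
                    "bb.command.CommandCompleted",
                    "bb.command.CommandFailed",
                    "bb.build.TaskFailed"]

    def register_events(server):
        llevel, debug_domains = bb.msg.constructLogOptions()
        server.runCommand(["setEventMask", server.getEventHandle(),
                           llevel, debug_domains, _my_evt_list])
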
diff --git a/bitbake/lib/bb/ui/icons/images/images_display.png b/bitbake/lib/bb/ui/icons/images/images_display.png
new file mode 100644
index 0000000000..a7f87101af
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/images/images_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/images/images_hover.png b/bitbake/lib/bb/ui/icons/images/images_hover.png
new file mode 100644
index 0000000000..2d9cd99b8e
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/images/images_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/add-hover.png b/bitbake/lib/bb/ui/icons/indicators/add-hover.png
new file mode 100644
index 0000000000..526df770d1
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/add-hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/add.png b/bitbake/lib/bb/ui/icons/indicators/add.png
new file mode 100644
index 0000000000..31e7090d61
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/add.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/alert.png b/bitbake/lib/bb/ui/icons/indicators/alert.png
new file mode 100644
index 0000000000..d1c6f55a2f
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/alert.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/confirmation.png b/bitbake/lib/bb/ui/icons/indicators/confirmation.png
new file mode 100644
index 0000000000..3a5402d1e3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/confirmation.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/denied.png b/bitbake/lib/bb/ui/icons/indicators/denied.png
new file mode 100644
index 0000000000..ee35c7defa
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/denied.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/error.png b/bitbake/lib/bb/ui/icons/indicators/error.png
new file mode 100644
index 0000000000..d06a8c151a
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/error.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/info.png b/bitbake/lib/bb/ui/icons/indicators/info.png
new file mode 100644
index 0000000000..ee8e8d8462
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/info.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/issues.png b/bitbake/lib/bb/ui/icons/indicators/issues.png
new file mode 100644
index 0000000000..b0c7461334
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/issues.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/refresh.png b/bitbake/lib/bb/ui/icons/indicators/refresh.png
new file mode 100644
index 0000000000..eb6c419db8
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/refresh.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/remove-hover.png b/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
new file mode 100644
index 0000000000..aa57c69982
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/remove.png b/bitbake/lib/bb/ui/icons/indicators/remove.png
new file mode 100644
index 0000000000..05c3c293d4
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/remove.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/tick.png b/bitbake/lib/bb/ui/icons/indicators/tick.png
new file mode 100644
index 0000000000..beaad361c3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/tick.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/info/info_display.png b/bitbake/lib/bb/ui/icons/info/info_display.png
new file mode 100644
index 0000000000..5afbba29f5
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/info/info_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/info/info_hover.png b/bitbake/lib/bb/ui/icons/info/info_hover.png
new file mode 100644
index 0000000000..f9d294dfae
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/info/info_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/layers/layers_display.png b/bitbake/lib/bb/ui/icons/layers/layers_display.png
new file mode 100644
index 0000000000..b7f9053a9e
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/layers/layers_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/layers/layers_hover.png b/bitbake/lib/bb/ui/icons/layers/layers_hover.png
new file mode 100644
index 0000000000..0bf3ce0dbc
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/layers/layers_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/packages/packages_display.png b/bitbake/lib/bb/ui/icons/packages/packages_display.png
new file mode 100644
index 0000000000..f5d0a5064d
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/packages/packages_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/packages/packages_hover.png b/bitbake/lib/bb/ui/icons/packages/packages_hover.png
new file mode 100644
index 0000000000..c081165f34
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/packages/packages_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/recipe/recipe_display.png b/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
new file mode 100644
index 0000000000..e9809bc7d9
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png b/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
new file mode 100644
index 0000000000..7e48da9af0
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/settings/settings_display.png b/bitbake/lib/bb/ui/icons/settings/settings_display.png
new file mode 100644
index 0000000000..88c464db04
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/settings/settings_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/settings/settings_hover.png b/bitbake/lib/bb/ui/icons/settings/settings_hover.png
new file mode 100644
index 0000000000..d92a0bf2c3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/settings/settings_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/templates/templates_display.png b/bitbake/lib/bb/ui/icons/templates/templates_display.png
new file mode 100644
index 0000000000..153c7afb62
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/templates/templates_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/templates/templates_hover.png b/bitbake/lib/bb/ui/icons/templates/templates_hover.png
new file mode 100644
index 0000000000..afb7165fe5
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/templates/templates_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
new file mode 100644
index 0000000000..41f1ba83af
--- /dev/null
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -0,0 +1,550 @@
1#
2# BitBake (No)TTY UI Implementation
3#
4# Handling output to TTYs or files (no TTY)
5#
6# Copyright (C) 2006-2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21from __future__ import division
22
23import os
24import sys
25import xmlrpclib
26import logging
27import progressbar
28import signal
29import bb.msg
30import time
31import fcntl
32import struct
33import copy
34import atexit
35from bb.ui import uihelper
36
37featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
38
39logger = logging.getLogger("BitBake")
40interactive = sys.stdout.isatty()
41
42class BBProgress(progressbar.ProgressBar):
43 def __init__(self, msg, maxval):
44 self.msg = msg
45 widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
46 progressbar.ETA()]
47
48 try:
49 self._resize_default = signal.getsignal(signal.SIGWINCH)
50 except:
51 self._resize_default = None
52 progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
53
54 def _handle_resize(self, signum, frame):
55 progressbar.ProgressBar._handle_resize(self, signum, frame)
56 if self._resize_default:
57 self._resize_default(signum, frame)
58 def finish(self):
59 progressbar.ProgressBar.finish(self)
60 if self._resize_default:
61 signal.signal(signal.SIGWINCH, self._resize_default)
62
63class NonInteractiveProgress(object):
64 fobj = sys.stdout
65
66 def __init__(self, msg, maxval):
67 self.msg = msg
68 self.maxval = maxval
69
70 def start(self):
71 self.fobj.write("%s..." % self.msg)
72 self.fobj.flush()
73 return self
74
75 def update(self, value):
76 pass
77
78 def finish(self):
79 self.fobj.write("done.\n")
80 self.fobj.flush()
81
82def new_progress(msg, maxval):
83 if interactive:
84 return BBProgress(msg, maxval)
85 else:
86 return NonInteractiveProgress(msg, maxval)
87
88def pluralise(singular, plural, qty):
89 if(qty == 1):
90 return singular % qty
91 else:
92 return plural % qty
93
94
95class InteractConsoleLogFilter(logging.Filter):
96 def __init__(self, tf, format):
97 self.tf = tf
98 self.format = format
99
100 def filter(self, record):
101 if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
102 return False
103 self.tf.clearFooter()
104 return True
105
106class TerminalFilter(object):
107 columns = 80
108
109 def sigwinch_handle(self, signum, frame):
110 self.columns = self.getTerminalColumns()
111 if self._sigwinch_default:
112 self._sigwinch_default(signum, frame)
113
114 def getTerminalColumns(self):
115 def ioctl_GWINSZ(fd):
116 try:
117 cr = struct.unpack('hh', fcntl.ioctl(fd, self.termios.TIOCGWINSZ, '1234'))
118 except:
119 return None
120 return cr
121 cr = ioctl_GWINSZ(sys.stdout.fileno())
122 if not cr:
123 try:
124 fd = os.open(os.ctermid(), os.O_RDONLY)
125 cr = ioctl_GWINSZ(fd)
126 os.close(fd)
127 except:
128 pass
129 if not cr:
130 try:
131                     cr = (int(os.environ['LINES']), int(os.environ['COLUMNS']))
132 except:
133 cr = (25, 80)
134 return cr[1]
135
136 def __init__(self, main, helper, console, errconsole, format):
137 self.main = main
138 self.helper = helper
139 self.cuu = None
140 self.stdinbackup = None
141 self.interactive = sys.stdout.isatty()
142 self.footer_present = False
143 self.lastpids = []
144
145 if not self.interactive:
146 return
147
148 try:
149 import curses
150 except ImportError:
151 sys.exit("FATAL: The knotty ui could not load the required curses python module.")
152
153 import termios
154 self.curses = curses
155 self.termios = termios
156 try:
157 fd = sys.stdin.fileno()
158 self.stdinbackup = termios.tcgetattr(fd)
159 new = copy.deepcopy(self.stdinbackup)
160 new[3] = new[3] & ~termios.ECHO
161 termios.tcsetattr(fd, termios.TCSADRAIN, new)
162 curses.setupterm()
163 if curses.tigetnum("colors") > 2:
164 format.enable_color()
165 self.ed = curses.tigetstr("ed")
166 if self.ed:
167 self.cuu = curses.tigetstr("cuu")
168 try:
169 self._sigwinch_default = signal.getsignal(signal.SIGWINCH)
170 signal.signal(signal.SIGWINCH, self.sigwinch_handle)
171 except:
172 pass
173 self.columns = self.getTerminalColumns()
174 except:
175 self.cuu = None
176 console.addFilter(InteractConsoleLogFilter(self, format))
177 errconsole.addFilter(InteractConsoleLogFilter(self, format))
178
179 def clearFooter(self):
180 if self.footer_present:
181 lines = self.footer_present
182 sys.stdout.write(self.curses.tparm(self.cuu, lines))
183 sys.stdout.write(self.curses.tparm(self.ed))
184 self.footer_present = False
185
186 def updateFooter(self):
187 if not self.cuu:
188 return
189 activetasks = self.helper.running_tasks
190 failedtasks = self.helper.failed_tasks
191 runningpids = self.helper.running_pids
192 if self.footer_present and (self.lastcount == self.helper.tasknumber_current) and (self.lastpids == runningpids):
193 return
194 if self.footer_present:
195 self.clearFooter()
196 if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
197 return
198 tasks = []
199 for t in runningpids:
200 tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
201
202 if self.main.shutdown:
203 content = "Waiting for %s running tasks to finish:" % len(activetasks)
204 elif not len(activetasks):
205 content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
206 else:
207 content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
208 print(content)
209 lines = 1 + int(len(content) / (self.columns + 1))
210 for tasknum, task in enumerate(tasks):
211 content = "%s: %s" % (tasknum, task)
212 print(content)
213 lines = lines + 1 + int(len(content) / (self.columns + 1))
214 self.footer_present = lines
215 self.lastpids = runningpids[:]
216 self.lastcount = self.helper.tasknumber_current
217
218 def finish(self):
219 if self.stdinbackup:
220 fd = sys.stdin.fileno()
221 self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
222
223def _log_settings_from_server(server):
224 # Get values of variables which control our output
225 includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
226 if error:
227 logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
228 raise BaseException(error)
229 loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
230 if error:
231 logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
232 raise BaseException(error)
233 consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
234 if error:
235 logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
236 raise BaseException(error)
237 return includelogs, loglines, consolelogfile
238
239_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
240 "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
241 "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
242 "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
243 "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
244 "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
245 "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
246 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent"]
247
248def main(server, eventHandler, params, tf = TerminalFilter):
249
250 includelogs, loglines, consolelogfile = _log_settings_from_server(server)
251
252 if sys.stdin.isatty() and sys.stdout.isatty():
253 log_exec_tty = True
254 else:
255 log_exec_tty = False
256
257 helper = uihelper.BBUIHelper()
258
259 console = logging.StreamHandler(sys.stdout)
260 errconsole = logging.StreamHandler(sys.stderr)
261 format_str = "%(levelname)s: %(message)s"
262 format = bb.msg.BBLogFormatter(format_str)
263 bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
264 bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
265 console.setFormatter(format)
266 errconsole.setFormatter(format)
267 logger.addHandler(console)
268 logger.addHandler(errconsole)
269
270 if params.options.remote_server and params.options.kill_server:
271 server.terminateServer()
272 return
273
274 if consolelogfile and not params.options.show_environment:
275 bb.utils.mkdirhier(os.path.dirname(consolelogfile))
276 conlogformat = bb.msg.BBLogFormatter(format_str)
277 consolelog = logging.FileHandler(consolelogfile)
278 bb.msg.addDefaultlogFilter(consolelog)
279 consolelog.setFormatter(conlogformat)
280 logger.addHandler(consolelog)
281
282 llevel, debug_domains = bb.msg.constructLogOptions()
283 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
284
285 if not params.observe_only:
286 params.updateFromServer(server)
287 cmdline = params.parseActions()
288 if not cmdline:
289 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
290 return 1
291 if 'msg' in cmdline and cmdline['msg']:
292 logger.error(cmdline['msg'])
293 return 1
294
295 ret, error = server.runCommand(cmdline['action'])
296 if error:
297 logger.error("Command '%s' failed: %s" % (cmdline, error))
298 return 1
299 elif ret != True:
300 logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
301 return 1
302
303
304 parseprogress = None
305 cacheprogress = None
306 main.shutdown = 0
307 interrupted = False
308 return_value = 0
309 errors = 0
310 warnings = 0
311 taskfailures = []
312
313 termfilter = tf(main, helper, console, errconsole, format)
314 atexit.register(termfilter.finish)
315
316 while True:
317 try:
318 event = eventHandler.waitEvent(0)
319 if event is None:
320 if main.shutdown > 1:
321 break
322 termfilter.updateFooter()
323 event = eventHandler.waitEvent(0.25)
324 if event is None:
325 continue
326 helper.eventHandler(event)
327 if isinstance(event, bb.runqueue.runQueueExitWait):
328 if not main.shutdown:
329 main.shutdown = 1
330 continue
331 if isinstance(event, bb.event.LogExecTTY):
332 if log_exec_tty:
333 tries = event.retries
334 while tries:
335 print("Trying to run: %s" % event.prog)
336 if os.system(event.prog) == 0:
337 break
338 time.sleep(event.sleep_delay)
339 tries -= 1
340 if tries:
341 continue
342 logger.warn(event.msg)
343 continue
344
345 if isinstance(event, logging.LogRecord):
346 if event.levelno >= format.ERROR:
347 errors = errors + 1
348 return_value = 1
349 elif event.levelno == format.WARNING:
350 warnings = warnings + 1
351 # For "normal" logging conditions, don't show note logs from tasks
352 # but do show them if the user has changed the default log level to
353 # include verbose/debug messages
354 if event.taskpid != 0 and event.levelno <= format.NOTE:
355 continue
356 logger.handle(event)
357 continue
358
359 if isinstance(event, bb.build.TaskFailedSilent):
360 logger.warn("Logfile for failed setscene task is %s" % event.logfile)
361 continue
362 if isinstance(event, bb.build.TaskFailed):
363 return_value = 1
364 logfile = event.logfile
365 if logfile and os.path.exists(logfile):
366 termfilter.clearFooter()
367 bb.error("Logfile of failure stored in: %s" % logfile)
368 if includelogs and not event.errprinted:
369 print("Log data follows:")
370 f = open(logfile, "r")
371 lines = []
372 while True:
373 l = f.readline()
374 if l == '':
375 break
376 l = l.rstrip()
377 if loglines:
378 lines.append(' | %s' % l)
379 if len(lines) > int(loglines):
380 lines.pop(0)
381 else:
382 print('| %s' % l)
383 f.close()
384 if lines:
385 for line in lines:
386 print(line)
387 if isinstance(event, bb.build.TaskBase):
388 logger.info(event._message)
389 continue
390 if isinstance(event, bb.event.ParseStarted):
391 if event.total == 0:
392 continue
393 parseprogress = new_progress("Parsing recipes", event.total).start()
394 continue
395 if isinstance(event, bb.event.ParseProgress):
396 parseprogress.update(event.current)
397 continue
398 if isinstance(event, bb.event.ParseCompleted):
399 if not parseprogress:
400 continue
401
402 parseprogress.finish()
403 print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
404 % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
405 continue
406
407 if isinstance(event, bb.event.CacheLoadStarted):
408 cacheprogress = new_progress("Loading cache", event.total).start()
409 continue
410 if isinstance(event, bb.event.CacheLoadProgress):
411 cacheprogress.update(event.current)
412 continue
413 if isinstance(event, bb.event.CacheLoadCompleted):
414 cacheprogress.finish()
415 print("Loaded %d entries from dependency cache." % event.num_entries)
416 continue
417
418 if isinstance(event, bb.command.CommandFailed):
419 return_value = event.exitcode
420 if event.error:
421 errors = errors + 1
422 logger.error("Command execution failed: %s", event.error)
423 main.shutdown = 2
424 continue
425 if isinstance(event, bb.command.CommandExit):
426 if not return_value:
427 return_value = event.exitcode
428 continue
429 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
430 main.shutdown = 2
431 continue
432 if isinstance(event, bb.event.MultipleProviders):
433 logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
434 event._item,
435 ", ".join(event._candidates))
436 logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
437 continue
438 if isinstance(event, bb.event.NoProvider):
439 return_value = 1
440 errors = errors + 1
441 if event._runtime:
442 r = "R"
443 else:
444 r = ""
445
446 extra = ''
447 if not event._reasons:
448 if event._close_matches:
449 extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
450
451 if event._dependees:
452 logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s", r, event._item, ", ".join(event._dependees), r, extra)
453 else:
454 logger.error("Nothing %sPROVIDES '%s'%s", r, event._item, extra)
455 if event._reasons:
456 for reason in event._reasons:
457 logger.error("%s", reason)
458 continue
459
460 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
461 logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
462 continue
463
464 if isinstance(event, bb.runqueue.runQueueTaskStarted):
465 if event.noexec:
466 tasktype = 'noexec task'
467 else:
468 tasktype = 'task'
469 logger.info("Running %s %s of %s (ID: %s, %s)",
470 tasktype,
471 event.stats.completed + event.stats.active +
472 event.stats.failed + 1,
473 event.stats.total, event.taskid, event.taskstring)
474 continue
475
476 if isinstance(event, bb.runqueue.runQueueTaskFailed):
477 taskfailures.append(event.taskstring)
478 logger.error("Task %s (%s) failed with exit code '%s'",
479 event.taskid, event.taskstring, event.exitcode)
480 continue
481
482 if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
483 logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
484 event.taskid, event.taskstring, event.exitcode)
485 continue
486
487 if isinstance(event, bb.event.DepTreeGenerated):
488 continue
489
490 # ignore
491 if isinstance(event, (bb.event.BuildBase,
492 bb.event.MetadataEvent,
493 bb.event.StampUpdate,
494 bb.event.ConfigParsed,
495 bb.event.RecipeParsed,
496 bb.event.RecipePreFinalise,
497 bb.runqueue.runQueueEvent,
498 bb.event.OperationStarted,
499 bb.event.OperationCompleted,
500 bb.event.OperationProgress,
501 bb.event.DiskFull)):
502 continue
503
504 logger.error("Unknown event: %s", event)
505
506 except EnvironmentError as ioerror:
507 termfilter.clearFooter()
508 # ignore interrupted io
509 if ioerror.args[0] == 4:
510 pass
511 except KeyboardInterrupt:
512 termfilter.clearFooter()
513 if params.observe_only:
514 print("\nKeyboard Interrupt, exiting observer...")
515 main.shutdown = 2
516 if not params.observe_only and main.shutdown == 1:
517 print("\nSecond Keyboard Interrupt, stopping...\n")
518 _, error = server.runCommand(["stateForceShutdown"])
519 if error:
520 logger.error("Unable to cleanly stop: %s" % error)
521 if not params.observe_only and main.shutdown == 0:
522 print("\nKeyboard Interrupt, closing down...\n")
523 interrupted = True
524 _, error = server.runCommand(["stateShutdown"])
525 if error:
526 logger.error("Unable to cleanly shutdown: %s" % error)
527 main.shutdown = main.shutdown + 1
528 pass
529
530 summary = ""
531 if taskfailures:
532 summary += pluralise("\nSummary: %s task failed:",
533 "\nSummary: %s tasks failed:", len(taskfailures))
534 for failure in taskfailures:
535 summary += "\n %s" % failure
536 if warnings:
537 summary += pluralise("\nSummary: There was %s WARNING message shown.",
538 "\nSummary: There were %s WARNING messages shown.", warnings)
539 if return_value and errors:
540 summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
541 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
542 if summary:
543 print(summary)
544
545 if interrupted:
546 print("Execution was interrupted, returning a non-zero exit code.")
547 if return_value == 0:
548 return_value = 1
549
550 return return_value
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
new file mode 100644
index 0000000000..b6c20ec388
--- /dev/null
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -0,0 +1,373 @@
1#
2# BitBake Curses UI Implementation
3#
4# Implements an ncurses frontend for the BitBake utility.
5#
6# Copyright (C) 2006 Michael 'Mickey' Lauer
7# Copyright (C) 2006-2007 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22"""
23 We have the following windows:
24
25 1.) Main Window: Shows what we are ultimately building and how far along we are. Includes a status bar.
26 2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
27 3.) Command Line Window: Contains an interactive command line where you can interact with BitBake.
28
29 Basic window layout is like this:
30
31 |---------------------------------------------------------|
32 | <Main Window> | <Thread Activity Window> |
33 | | 0: foo do_compile complete|
34 | Building Gtk+-2.6.10 | 1: bar do_patch complete |
35 | Status: 60% | ... |
36 | | ... |
37 | | ... |
38 |---------------------------------------------------------|
39 |<Command Line Window> |
40 |>>> which virtual/kernel |
41 |openzaurus-kernel |
42 |>>> _ |
43 |---------------------------------------------------------|
44
45"""
46
47
48from __future__ import division
49import logging
50import os, sys, itertools, time, subprocess
51
52try:
53 import curses
54except ImportError:
55 sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
56
57import bb
58import xmlrpclib
59from bb import ui
60from bb.ui import uihelper
61logger = logging.getLogger("BitBake")
62parsespin = itertools.cycle( r'|/-\\' )
63
64X = 0
65Y = 1
66WIDTH = 2
67HEIGHT = 3
68
69MAXSTATUSLENGTH = 32
70
71class NCursesUI:
72 """
73 NCurses UI Class
74 """
75 class Window:
76 """Base Window Class"""
77 def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
78 self.win = curses.newwin( height, width, y, x )
79 self.dimensions = ( x, y, width, height )
80 """
81 if curses.has_colors():
82 color = 1
83 curses.init_pair( color, fg, bg )
84 self.win.bkgdset( ord(' '), curses.color_pair(color) )
85 else:
86 self.win.bkgdset( ord(' '), curses.A_BOLD )
87 """
88 self.erase()
89 self.setScrolling()
90 self.win.noutrefresh()
91
92 def erase( self ):
93 self.win.erase()
94
95 def setScrolling( self, b = True ):
96 self.win.scrollok( b )
97 self.win.idlok( b )
98
99 def setBoxed( self ):
100 self.boxed = True
101 self.win.box()
102 self.win.noutrefresh()
103
104 def setText( self, x, y, text, *args ):
105 self.win.addstr( y, x, text, *args )
106 self.win.noutrefresh()
107
108 def appendText( self, text, *args ):
109 self.win.addstr( text, *args )
110 self.win.noutrefresh()
111
112 def drawHline( self, y ):
113 self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
114 self.win.noutrefresh()
115
116 class DecoratedWindow( Window ):
117 """Base class for windows with a box and a title bar"""
118 def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
119 NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
120 self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
121 self.decoration.setBoxed()
122 self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
123 self.setTitle( title )
124
125 def setTitle( self, title ):
126 self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
127
128 #-------------------------------------------------------------------------#
129# class TitleWindow( Window ):
130 #-------------------------------------------------------------------------#
131# """Title Window"""
132# def __init__( self, x, y, width, height ):
133# NCursesUI.Window.__init__( self, x, y, width, height )
134# version = bb.__version__
135# title = "BitBake %s" % version
136# credit = "(C) 2003-2007 Team BitBake"
137# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
138# self.win.border()
139# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
140# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
141
142 #-------------------------------------------------------------------------#
143 class ThreadActivityWindow( DecoratedWindow ):
144 #-------------------------------------------------------------------------#
145 """Thread Activity Window"""
146 def __init__( self, x, y, width, height ):
147 NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
148
149 def setStatus( self, thread, text ):
150 line = "%02d: %s" % ( thread, text )
151 width = self.dimensions[WIDTH]
152 if ( len(line) > width ):
153 line = line[:width-3] + "..."
154 else:
155 line = line.ljust( width )
156 self.setText( 0, thread, line )
157
158 #-------------------------------------------------------------------------#
159 class MainWindow( DecoratedWindow ):
160 #-------------------------------------------------------------------------#
161 """Main Window"""
162 def __init__( self, x, y, width, height ):
163 self.StatusPosition = width - MAXSTATUSLENGTH
164 NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
165 curses.nl()
166
167 def setTitle( self, title ):
168 title = "BitBake %s" % bb.__version__
169 self.decoration.setText( 2, 1, title, curses.A_BOLD )
170 self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
171
172 def setStatus(self, status):
173 while len(status) < MAXSTATUSLENGTH:
174 status = status + " "
175 self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
176
177
178 #-------------------------------------------------------------------------#
179 class ShellOutputWindow( DecoratedWindow ):
180 #-------------------------------------------------------------------------#
181 """Interactive Command Line Output"""
182 def __init__( self, x, y, width, height ):
183 NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
184
185 #-------------------------------------------------------------------------#
186 class ShellInputWindow( Window ):
187 #-------------------------------------------------------------------------#
188 """Interactive Command Line Input"""
189 def __init__( self, x, y, width, height ):
190 NCursesUI.Window.__init__( self, x, y, width, height )
191
192# TODO: move this import back to the top: from curses.textpad import Textbox
193# self.textbox = Textbox( self.win )
194# t = threading.Thread()
195# t.run = self.textbox.edit
196# t.start()
197
198 #-------------------------------------------------------------------------#
199 def main(self, stdscr, server, eventHandler, params):
200 #-------------------------------------------------------------------------#
201 height, width = stdscr.getmaxyx()
202
203 # for now split it like this:
204 # MAIN_y + THREAD_y = 2/3 screen at the top
205 # MAIN_x = 2/3 left, THREAD_x = 1/3 right
206 # CLI_y = 1/3 of screen at the bottom
207 # CLI_x = full
208
209 main_left = 0
210 main_top = 0
211 main_height = ( height // 3 * 2 )
212 main_width = ( width // 3 ) * 2
213 clo_left = main_left
214 clo_top = main_top + main_height
215 clo_height = height - main_height - main_top - 1
216 clo_width = width
217 cli_left = main_left
218 cli_top = clo_top + clo_height
219 cli_height = 1
220 cli_width = width
221 thread_left = main_left + main_width
222 thread_top = main_top
223 thread_height = main_height
224 thread_width = width - main_width
225
226 #tw = self.TitleWindow( 0, 0, width, main_top )
227 mw = self.MainWindow( main_left, main_top, main_width, main_height )
228 taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
229 clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
230 cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
231 cli.setText( 0, 0, "BB>" )
232
233 mw.setStatus("Idle")
234
235 helper = uihelper.BBUIHelper()
236 shutdown = 0
237
238 try:
239 params.updateFromServer(server)
240 cmdline = params.parseActions()
241 if not cmdline:
242 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
243 return 1
244 if 'msg' in cmdline and cmdline['msg']:
245 logger.error(cmdline['msg'])
246 return 1
247 cmdline = cmdline['action']
248 ret, error = server.runCommand(cmdline)
249 if error:
250 print("Error running command '%s': %s" % (cmdline, error))
251 return
252 elif ret != True:
253 print("Couldn't get default commandlind! %s" % ret)
254 return
255 except xmlrpclib.Fault as x:
256 print("XMLRPC Fault getting commandline:\n %s" % x)
257 return
258
259 exitflag = False
260 while not exitflag:
261 try:
262 event = eventHandler.waitEvent(0.25)
263 if not event:
264 continue
265
266 helper.eventHandler(event)
267 if isinstance(event, bb.build.TaskBase):
268 mw.appendText("NOTE: %s\n" % event._message)
269 if isinstance(event, logging.LogRecord):
270 mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
271
272 if isinstance(event, bb.event.CacheLoadStarted):
273 self.parse_total = event.total
274 if isinstance(event, bb.event.CacheLoadProgress):
275 x = event.current
276 y = self.parse_total
277 mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
278 if isinstance(event, bb.event.CacheLoadCompleted):
279 mw.setStatus("Idle")
280 mw.appendText("Loaded %d entries from dependency cache.\n"
281 % ( event.num_entries))
282
283 if isinstance(event, bb.event.ParseStarted):
284 self.parse_total = event.total
285 if isinstance(event, bb.event.ParseProgress):
286 x = event.current
287 y = self.parse_total
288 mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
289 if isinstance(event, bb.event.ParseCompleted):
290 mw.setStatus("Idle")
291 mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
292 % ( event.cached, event.parsed, event.skipped, event.masked ))
293
294# if isinstance(event, bb.build.TaskFailed):
295# if event.logfile:
296# if data.getVar("BBINCLUDELOGS", d):
297# bb.error("log data follows (%s)" % logfile)
298# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
299# if number_of_lines:
300# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
301# else:
302# f = open(logfile, "r")
303# while True:
304# l = f.readline()
305# if l == '':
306# break
307# l = l.rstrip()
308# print '| %s' % l
309# f.close()
310# else:
311# bb.error("see log in %s" % logfile)
312
313 if isinstance(event, bb.command.CommandCompleted):
314 # stop so the user can see the result of the build, but
315 # also allow them to now exit with a single ^C
316 shutdown = 2
317 if isinstance(event, bb.command.CommandFailed):
318 mw.appendText("Command execution failed: %s" % event.error)
319 time.sleep(2)
320 exitflag = True
321 if isinstance(event, bb.command.CommandExit):
322 exitflag = True
323 if isinstance(event, bb.cooker.CookerExit):
324 exitflag = True
325
326 if isinstance(event, bb.event.LogExecTTY):
327 mw.appendText('WARN: ' + event.msg + '\n')
328 if helper.needUpdate:
329 activetasks, failedtasks = helper.getTasks()
330 taw.erase()
331 taw.setText(0, 0, "")
332 if activetasks:
333 taw.appendText("Active Tasks:\n")
334 for task in activetasks.itervalues():
335 taw.appendText(task["title"] + '\n')
336 if failedtasks:
337 taw.appendText("Failed Tasks:\n")
338 for task in failedtasks:
339 taw.appendText(task["title"] + '\n')
340
341 curses.doupdate()
342 except EnvironmentError as ioerror:
343 # ignore interrupted io
344 if ioerror.args[0] == 4:
345 pass
346
347 except KeyboardInterrupt:
348 if shutdown == 2:
349 mw.appendText("Third Keyboard Interrupt, exit.\n")
350 exitflag = True
351 if shutdown == 1:
352 mw.appendText("Second Keyboard Interrupt, stopping...\n")
353 _, error = server.runCommand(["stateForceShutdown"])
354 if error:
355 print("Unable to cleanly stop: %s" % error)
356 if shutdown == 0:
357 mw.appendText("Keyboard Interrupt, closing down...\n")
358 _, error = server.runCommand(["stateShutdown"])
359 if error:
360 print("Unable to cleanly shutdown: %s" % error)
361 shutdown = shutdown + 1
362 pass
363
364def main(server, eventHandler, params):
365 if not os.isatty(sys.stdout.fileno()):
366 print("FATAL: Unable to run 'ncurses' UI without a TTY.")
367 return
368 ui = NCursesUI()
369 try:
370 curses.wrapper(ui.main, server, eventHandler, params)
371 except:
372 import traceback
373 traceback.print_exc()
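
For reference, a minimal sketch of the 2/3 + 1/3 screen split described in the module docstring above, using only standard curses calls (newwin, addstr, noutrefresh, doupdate); the window titles and the demo itself are illustrative placeholders, not part of NCursesUI.

    import curses

    def demo(stdscr):
        height, width = stdscr.getmaxyx()
        main_w = (width // 3) * 2
        main_h = (height // 3) * 2
        main = curses.newwin(main_h, main_w, 0, 0)                  # top-left 2/3
        threads = curses.newwin(main_h, width - main_w, 0, main_w)  # top-right 1/3
        cli = curses.newwin(height - main_h, width, main_h, 0)      # bottom strip
        main.addstr(1, 1, "Main Window")
        threads.addstr(1, 1, "Thread Activity")
        cli.addstr(0, 0, "BB>")
        for win in (main, threads, cli):
            win.noutrefresh()            # stage the updates
        curses.doupdate()                # flush them to the screen at once
        stdscr.getch()                   # wait for a key before exiting

    curses.wrapper(demo)
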
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
new file mode 100644
index 0000000000..3ce4590c16
--- /dev/null
+++ b/bitbake/lib/bb/ui/puccho.py
@@ -0,0 +1,425 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23import gtk.glade
24import threading
25import urllib2
26import os
27import contextlib
28
29from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
30from bb.ui.crumbs.buildmanager import BuildManagerTreeView
31
32from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView
33
34# The metadata loader is used by the BuildSetupDialog to download the
35# available options to populate the dialog
36class MetaDataLoader(gobject.GObject):
37 """ This class provides the mechanism for loading the metadata (the
38 fetching and parsing) from a given URL. The metadata encompasses details
39 on what machines are available. The distribution and images available for
40 the machine and the the uris to use for building the given machine."""
41 __gsignals__ = {
42 'success' : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 ()),
45 'error' : (gobject.SIGNAL_RUN_LAST,
46 gobject.TYPE_NONE,
47 (gobject.TYPE_STRING,))
48 }
49
50 # We use these little helper functions to ensure that we take the gdk lock
51 # when emitting the signal. These functions are called as idles (so that
52 # they happen in the gtk / main thread's main loop).
53 def emit_error_signal (self, remark):
54 gtk.gdk.threads_enter()
55 self.emit ("error", remark)
56 gtk.gdk.threads_leave()
57
58 def emit_success_signal (self):
59 gtk.gdk.threads_enter()
60 self.emit ("success")
61 gtk.gdk.threads_leave()
62
63 def __init__ (self):
64 gobject.GObject.__init__ (self)
65
66 class LoaderThread(threading.Thread):
67 """ This class provides an asynchronous loader for the metadata (by
68 using threads and signals). This is useful since the metadata may be
69 at a remote URL."""
70 class LoaderImportException (Exception):
71 pass
72
73 def __init__(self, loader, url):
74 threading.Thread.__init__ (self)
75 self.url = url
76 self.loader = loader
77
78 def run (self):
79 result = {}
80 try:
81 with contextlib.closing (urllib2.urlopen (self.url)) as f:
82 # Parse the metadata format. The format is....
83 # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
84 for line in f:
85 components = line.split(";")
86 if (len (components) < 4):
87 raise MetaDataLoader.LoaderThread.LoaderImportException
88 machine = components[0]
89 distros = components[1].split("|")
90 images = components[2].split("|")
91 urls = components[3].split("|")
92
93 result[machine] = (distros, images, urls)
94
95 # Create an object representing this *potential*
96 # configuration. It can become concrete if the machine, distro
97 # and image are all chosen in the UI
98 configuration = BuildConfiguration()
99 configuration.metadata_url = self.url
100 configuration.machine_options = result
101 self.loader.configuration = configuration
102
103 # Emit that we've actually got a configuration
104 gobject.idle_add (MetaDataLoader.emit_success_signal,
105 self.loader)
106
107 except MetaDataLoader.LoaderThread.LoaderImportException as e:
108 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
109 "Repository metadata corrupt")
110 except Exception as e:
111 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
112 "Unable to download repository metadata")
113 print(e)
114
115 def try_fetch_from_url (self, url):
116 # Try and download the metadata. Firing a signal if successful
117 thread = MetaDataLoader.LoaderThread(self, url)
118 thread.start()
119
120class BuildSetupDialog (gtk.Dialog):
121 RESPONSE_BUILD = 1
122
123 # A little helper method that just sets the states on the widgets based on
124 # whether we've got good metadata or not.
125 def set_configurable (self, configurable):
126 if (self.configurable == configurable):
127 return
128
129 self.configurable = configurable
130 for widget in self.conf_widgets:
131 widget.set_sensitive (configurable)
132
133 if not configurable:
134 self.machine_combo.set_active (-1)
135 self.distribution_combo.set_active (-1)
136 self.image_combo.set_active (-1)
137
138 # GTK widget callbacks
139 def refresh_button_clicked (self, button):
140 # Refresh button clicked.
141
142 url = self.location_entry.get_chars (0, -1)
143 self.loader.try_fetch_from_url(url)
144
145 def repository_entry_editable_changed (self, entry):
146 if (len (entry.get_chars (0, -1)) > 0):
147 self.refresh_button.set_sensitive (True)
148 else:
149 self.refresh_button.set_sensitive (False)
150 self.clear_status_message()
151
152 # If we were previously configurable we are no longer since the
153 # location entry has been changed
154 self.set_configurable (False)
155
156 def machine_combo_changed (self, combobox):
157 active_iter = combobox.get_active_iter()
158
159 if not active_iter:
160 return
161
162 model = combobox.get_model()
163
164 if model:
165 chosen_machine = model.get (active_iter, 0)[0]
166
167 (distros_model, images_model) = \
168 self.loader.configuration.get_distro_and_images_models (chosen_machine)
169
170 self.distribution_combo.set_model (distros_model)
171 self.image_combo.set_model (images_model)
172
173 # Callbacks from the loader
174 def loader_success_cb (self, loader):
175 self.status_image.set_from_icon_name ("info",
176 gtk.ICON_SIZE_BUTTON)
177 self.status_image.show()
178 self.status_label.set_label ("Repository metadata successfully downloaded")
179
180 # Set the models on the combo boxes based on the models generated from
181 # the configuration that the loader has created
182
183 # We just need to set the machine here, that then determines the
184 # distro and image options. Cunning huh? :-)
185
186 self.configuration = self.loader.configuration
187 model = self.configuration.get_machines_model ()
188 self.machine_combo.set_model (model)
189
190 self.set_configurable (True)
191
192 def loader_error_cb (self, loader, message):
193 self.status_image.set_from_icon_name ("error",
194 gtk.ICON_SIZE_BUTTON)
195 self.status_image.show()
196 self.status_label.set_text ("Error downloading repository metadata")
197 for widget in self.conf_widgets:
198 widget.set_sensitive (False)
199
200 def clear_status_message (self):
201 self.status_image.hide()
202 self.status_label.set_label (
203 """<i>Enter the repository location and press _Refresh</i>""")
204
205 def __init__ (self):
206 gtk.Dialog.__init__ (self)
207
208 # Cancel
209 self.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
210
211 # Build
212 button = gtk.Button ("_Build", None, True)
213 image = gtk.Image ()
214 image.set_from_stock (gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
215 button.set_image (image)
216 self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
217 button.show_all ()
218
219 # Pull in *just* the table from the Glade XML data.
220 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
221 root = "build_table")
222 table = gxml.get_widget ("build_table")
223 self.vbox.pack_start (table, True, False, 0)
224
225 # Grab all the widgets that we need to turn on/off when we refresh...
226 self.conf_widgets = []
227 self.conf_widgets += [gxml.get_widget ("machine_label")]
228 self.conf_widgets += [gxml.get_widget ("distribution_label")]
229 self.conf_widgets += [gxml.get_widget ("image_label")]
230 self.conf_widgets += [gxml.get_widget ("machine_combo")]
231 self.conf_widgets += [gxml.get_widget ("distribution_combo")]
232 self.conf_widgets += [gxml.get_widget ("image_combo")]
233
234 # Grab the status widgets
235 self.status_image = gxml.get_widget ("status_image")
236 self.status_label = gxml.get_widget ("status_label")
237
238 # Grab the refresh button and connect to the clicked signal
239 self.refresh_button = gxml.get_widget ("refresh_button")
240 self.refresh_button.connect ("clicked", self.refresh_button_clicked)
241
242 # Grab the location entry and connect to editable::changed
243 self.location_entry = gxml.get_widget ("location_entry")
244 self.location_entry.connect ("changed",
245 self.repository_entry_editable_changed)
246
247 # Grab the machine combo and hook onto the changed signal. This then
248 # allows us to populate the distro and image combos
249 self.machine_combo = gxml.get_widget ("machine_combo")
250 self.machine_combo.connect ("changed", self.machine_combo_changed)
251
252 # Setup the combo
253 cell = gtk.CellRendererText()
254 self.machine_combo.pack_start(cell, True)
255 self.machine_combo.add_attribute(cell, 'text', 0)
256
257 # Grab the distro and image combos. We need these to populate with
258 # models once the machine is chosen
259 self.distribution_combo = gxml.get_widget ("distribution_combo")
260 cell = gtk.CellRendererText()
261 self.distribution_combo.pack_start(cell, True)
262 self.distribution_combo.add_attribute(cell, 'text', 0)
263
264 self.image_combo = gxml.get_widget ("image_combo")
265 cell = gtk.CellRendererText()
266 self.image_combo.pack_start(cell, True)
267 self.image_combo.add_attribute(cell, 'text', 0)
268
269 # Put the default descriptive text in the status box
270 self.clear_status_message()
271
272 # Mark as non-configurable; this just greys out the widgets the
273 # user can't yet use
274 self.configurable = False
275 self.set_configurable(False)
276
277 # Show the table
278 table.show_all ()
279
280 # The loader and some signals connected to it to update the status
281 # area
282 self.loader = MetaDataLoader()
283 self.loader.connect ("success", self.loader_success_cb)
284 self.loader.connect ("error", self.loader_error_cb)
285
286 def update_configuration (self):
287 """ A poorly named function but it updates the internal configuration
288 from the widgets. This can make that configuration concrete and can
289 thus be used for building """
290 # Extract the chosen machine from the combo
291 model = self.machine_combo.get_model()
292 active_iter = self.machine_combo.get_active_iter()
293 if (active_iter):
294 self.configuration.machine = model.get(active_iter, 0)[0]
295
296 # Extract the chosen distro from the combo
297 model = self.distribution_combo.get_model()
298 active_iter = self.distribution_combo.get_active_iter()
299 if (active_iter):
300 self.configuration.distro = model.get(active_iter, 0)[0]
301
302 # Extract the chosen image from the combo
303 model = self.image_combo.get_model()
304 active_iter = self.image_combo.get_active_iter()
305 if (active_iter):
306 self.configuration.image = model.get(active_iter, 0)[0]
307
308# This function pulls events out of the event queue and pushes them into
309# the RunningBuild (which in turn updates the model behind the progress
310# tree view.)
311#
312# TODO: Should be a method on the RunningBuild class
313def event_handle_timeout (eventHandler, build):
314 # Consume as many messages as we can ...
315 event = eventHandler.getEvent()
316 while event:
317 build.handle_event (event)
318 event = eventHandler.getEvent()
319 return True
320
321class MainWindow (gtk.Window):
322
323 # Callback that gets fired when the user hits a button in the
324 # BuildSetupDialog.
325 def build_dialog_box_response_cb (self, dialog, response_id):
326 conf = None
327 if (response_id == BuildSetupDialog.RESPONSE_BUILD):
328 dialog.update_configuration()
329 print(dialog.configuration.machine, dialog.configuration.distro, \
330 dialog.configuration.image)
331 conf = dialog.configuration
332
333 dialog.destroy()
334
335 if conf:
336 self.manager.do_build (conf)
337
338 def build_button_clicked_cb (self, button):
339 dialog = BuildSetupDialog ()
340
341 # For some unknown reason Dialog.run causes nice little deadlocks ... :-(
342 dialog.connect ("response", self.build_dialog_box_response_cb)
343 dialog.show()
344
345 def __init__ (self):
346 gtk.Window.__init__ (self)
347
348 # Pull in *just* the main vbox from the Glade XML data and then pack
349 # that inside the window
350 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
351 root = "main_window_vbox")
352 vbox = gxml.get_widget ("main_window_vbox")
353 self.add (vbox)
354
355 # Create the tree views for the build manager view and the progress view
356 self.build_manager_view = BuildManagerTreeView()
357 self.running_build_view = RunningBuildTreeView()
358
359 # Grab the scrolled windows that we put the tree views into
360 self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
361 self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
362
363 # Put the tree views inside ...
364 self.results_scrolledwindow.add (self.build_manager_view)
365 self.progress_scrolledwindow.add (self.running_build_view)
366
367 # Hook up the build button...
368 self.build_button = gxml.get_widget ("main_toolbutton_build")
369 self.build_button.connect ("clicked", self.build_button_clicked_cb)
370
371# I'm not very happy about the current ownership of the RunningBuild. I have
372# my suspicions that this object should be held by the BuildManager since we
373# care about the signals in the manager
374
375def running_build_succeeded_cb (running_build, manager):
376 # Notify the manager that a build has succeeded. This is necessary as part
377 # of the 'hack' that we use for making the row in the model / view
378 # representing the ongoing build change into a row representing the
379 # completed build. Since we know only one build can be running at a
380 # time, we can handle this.
381
382 # FIXME: Refactor all this so that the RunningBuild is owned by the
383 # BuildManager. It can then hook onto the signals directly and drive
384 # interesting things it cares about.
385 manager.notify_build_succeeded ()
386 print("build succeeded")
387
388def running_build_failed_cb (running_build, manager):
389 # As above
390 print("build failed")
391 manager.notify_build_failed ()
392
393def main (server, eventHandler):
394 # Initialise threading...
395 gobject.threads_init()
396 gtk.gdk.threads_init()
397
398 main_window = MainWindow ()
399 main_window.show_all ()
400
401 # Set up the build manager stuff in general
402 builds_dir = os.path.join (os.getcwd(), "results")
403 manager = BuildManager (server, builds_dir)
404 main_window.build_manager_view.set_model (manager.model)
405
406 # Do the running build setup
407 running_build = RunningBuild ()
408 main_window.running_build_view.set_model (running_build.model)
409 running_build.connect ("build-succeeded", running_build_succeeded_cb,
410 manager)
411 running_build.connect ("build-failed", running_build_failed_cb, manager)
412
413 # We need to save the manager into the MainWindow so that the toolbar
414 # button can use it.
415 # FIXME: Refactor ?
416 main_window.manager = manager
417
418 # Use a timeout function for probing the event queue to find out if we
419 # have a message waiting for us.
420 gobject.timeout_add (200,
421 event_handle_timeout,
422 eventHandler,
423 running_build)
424
425 gtk.main()
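
The LoaderThread.run() method above splits each metadata line on ';' and '|'. A hedged illustration of that format with an invented sample line (the machine, distro, image and URL values are made up for demonstration only):

    sample = "qemux86;poky|poky-tiny;core-image-minimal|core-image-sato;git##git://example.com/meta"
    machine, distros, images, urls = sample.split(";")
    print(machine)               # qemux86
    print(distros.split("|"))    # ['poky', 'poky-tiny']
    print(images.split("|"))     # ['core-image-minimal', 'core-image-sato']
    print(urls.split("|"))       # ['git##git://example.com/meta']
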
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
new file mode 100644
index 0000000000..9a9fe6f2d8
--- /dev/null
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -0,0 +1,292 @@
1#
2# BitBake ToasterUI Implementation
3# based on (No)TTY UI Implementation by Richard Purdie
4#
5# Handling output to TTYs or files (no TTY)
6#
7# Copyright (C) 2006-2012 Richard Purdie
8# Copyright (C) 2013 Intel Corporation
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23from __future__ import division
24try:
25 import bb
26except RuntimeError as exc:
27 raise SystemExit(str(exc))
28
29from bb.ui import uihelper
30from bb.ui.buildinfohelper import BuildInfoHelper
31
32import bb.msg
33import copy
34import fcntl
35import logging
36import os
37import progressbar
38import signal
39import struct
40import sys
41import time
42import xmlrpclib
43
44featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
45
46logger = logging.getLogger("BitBake")
47interactive = sys.stdout.isatty()
48
49
50
51def _log_settings_from_server(server):
52 # Get values of variables which control our output
53 includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
54 if error:
55 logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
56 raise BaseException(error)
57 loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
58 if error:
59 logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
60 raise BaseException(error)
61 return includelogs, loglines
62
63def main(server, eventHandler, params ):
64
65 includelogs, loglines = _log_settings_from_server(server)
66
67 # verify and warn
68 build_history_enabled = True
69 inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
70 if not "buildhistory" in inheritlist.split(" "):
71 logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
72 build_history_enabled = False
73
74 helper = uihelper.BBUIHelper()
75
76 console = logging.StreamHandler(sys.stdout)
77 format_str = "%(levelname)s: %(message)s"
78 format = bb.msg.BBLogFormatter(format_str)
79 bb.msg.addDefaultlogFilter(console)
80 console.setFormatter(format)
81 logger.addHandler(console)
82
83 if not params.observe_only:
84 logger.error("ToasterUI can only work in observer mode")
85 return
86
87
88 main.shutdown = 0
89 interrupted = False
90 return_value = 0
91 errors = 0
92 warnings = 0
93 taskfailures = []
94
95 buildinfohelper = BuildInfoHelper(server, build_history_enabled)
96
97
98 while True:
99 try:
100 event = eventHandler.waitEvent(0.25)
101
102 if event is None:
103 if main.shutdown > 0:
104 break
105 continue
106
107 helper.eventHandler(event)
108
109 if isinstance(event, bb.event.BuildStarted):
110 buildinfohelper.store_started_build(event)
111
112 if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
113 buildinfohelper.update_and_store_task(event)
114 continue
115
116 if isinstance(event, bb.event.LogExecTTY):
117 logger.warn(event.msg)
118 continue
119
120 if isinstance(event, logging.LogRecord):
121 buildinfohelper.store_log_event(event)
122 if event.levelno >= format.ERROR:
123 errors = errors + 1
124 return_value = 1
125 elif event.levelno == format.WARNING:
126 warnings = warnings + 1
127 # For "normal" logging conditions, don't show note logs from tasks
128 # but do show them if the user has changed the default log level to
129 # include verbose/debug messages
130 if event.taskpid != 0 and event.levelno <= format.NOTE:
131 continue
132
133 logger.handle(event)
134 continue
135
136 if isinstance(event, bb.build.TaskFailed):
137 buildinfohelper.update_and_store_task(event)
138 return_value = 1
139 logfile = event.logfile
140 if logfile and os.path.exists(logfile):
141 bb.error("Logfile of failure stored in: %s" % logfile)
142 continue
143
144 # these events are unprocessed now, but may be used in the future to log
145 # timing and error information from the parsing phase in Toaster
146 if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
147 continue
148 if isinstance(event, bb.event.ParseStarted):
149 continue
150 if isinstance(event, bb.event.ParseProgress):
151 continue
152 if isinstance(event, bb.event.ParseCompleted):
153 continue
154 if isinstance(event, bb.event.CacheLoadStarted):
155 continue
156 if isinstance(event, bb.event.CacheLoadProgress):
157 continue
158 if isinstance(event, bb.event.CacheLoadCompleted):
159 continue
160 if isinstance(event, bb.event.MultipleProviders):
161 continue
162 if isinstance(event, bb.event.NoProvider):
163 return_value = 1
164 errors = errors + 1
165 if event._runtime:
166 r = "R"
167 else:
168 r = ""
169
170 if event._dependees:
171 text = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
172 else:
173 text = "Nothing %sPROVIDES '%s'" % (r, event._item)
174
175 logger.error(text)
176 if event._reasons:
177 for reason in event._reasons:
178 logger.error("%s", reason)
179 text += reason
180 buildinfohelper.store_log_error(text)
181 continue
182
183 if isinstance(event, bb.event.ConfigParsed):
184 continue
185 if isinstance(event, bb.event.RecipeParsed):
186 continue
187
188 # end of saved events
189
190 if isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped)):
191 buildinfohelper.store_started_task(event)
192 continue
193
194 if isinstance(event, bb.runqueue.runQueueTaskCompleted):
195 buildinfohelper.update_and_store_task(event)
196 continue
197
198 if isinstance(event, bb.runqueue.runQueueTaskFailed):
199 buildinfohelper.update_and_store_task(event)
200 taskfailures.append(event.taskstring)
201 logger.error("Task %s (%s) failed with exit code '%s'",
202 event.taskid, event.taskstring, event.exitcode)
203 continue
204
205 if isinstance(event, (bb.runqueue.sceneQueueTaskCompleted, bb.runqueue.sceneQueueTaskFailed)):
206 buildinfohelper.update_and_store_task(event)
207 continue
208
209
210 if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
211 continue
212
213 if isinstance(event, (bb.event.BuildCompleted)):
214 continue
215
216 if isinstance(event, (bb.command.CommandCompleted,
217 bb.command.CommandFailed,
218 bb.command.CommandExit)):
219 if (isinstance(event, bb.command.CommandFailed)):
220 event.levelno = format.ERROR
221 event.msg = event.error
222 event.pathname = ""
223 event.lineno = 0
224 buildinfohelper.store_log_event(event)
225 errors += 1
226
227 buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
228
229 # we start a new build info
230 errors = 0
231 warnings = 0
232 taskfailures = []
233 buildinfohelper = BuildInfoHelper(server, build_history_enabled)
234 continue
235
236 if isinstance(event, bb.event.MetadataEvent):
237 if event.type == "SinglePackageInfo":
238 buildinfohelper.store_build_package_information(event)
239 elif event.type == "LayerInfo":
240 buildinfohelper.store_layer_info(event)
241 elif event.type == "BuildStatsList":
242 buildinfohelper.store_tasks_stats(event)
243 elif event.type == "ImagePkgList":
244 buildinfohelper.store_target_package_data(event)
245 elif event.type == "MissedSstate":
246 buildinfohelper.store_missed_state_tasks(event)
247 elif event.type == "ImageFileSize":
248 buildinfohelper.update_target_image_file(event)
249 elif event.type == "LicenseManifestPath":
250 buildinfohelper.store_license_manifest_path(event)
251 continue
252
253 if isinstance(event, bb.cooker.CookerExit):
254 # exit when the server exits
255 break
256
257 # ignore
258 if isinstance(event, (bb.event.BuildBase,
259 bb.event.StampUpdate,
260 bb.event.RecipePreFinalise,
261 bb.runqueue.runQueueEvent,
262 bb.runqueue.runQueueExitWait,
263 bb.event.OperationProgress,
264 bb.command.CommandFailed,
265 bb.command.CommandExit,
266 bb.command.CommandCompleted)):
267 continue
268
269 if isinstance(event, bb.event.DepTreeGenerated):
270 buildinfohelper.store_dependency_information(event)
271 continue
272
273 logger.error("Unknown event: %s", event)
274
275 except EnvironmentError as ioerror:
276 # ignore interrupted io
277 if ioerror.args[0] == 4:
278 pass
279 except KeyboardInterrupt:
280 main.shutdown = 1
281 pass
282 except Exception as e:
283 logger.error(e)
284 import traceback
285 traceback.print_exc()
286 pass
287
288 if interrupted:
289 if return_value == 0:
290 return_value = 1
291
292 return return_value
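
toasterui, like knotty and the ncurses front-end, is built around the same loop: poll the UI event queue, dispatch on the event's type, and stop once the server signals completion. A stripped-down sketch of that pattern; the handler table is hypothetical, not an API any of the UIs expose:

    def event_loop(eventHandler, handlers):
        """handlers: list of (event_class, callback) pairs, checked in order.
        A callback returns True to request shutdown."""
        shutdown = False
        while not shutdown:
            event = eventHandler.waitEvent(0.25)   # block for up to 250 ms
            if event is None:
                continue
            for klass, callback in handlers:
                if isinstance(event, klass):
                    shutdown = bool(callback(event)) or shutdown
                    break
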
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
new file mode 100644
index 0000000000..98658f68bf
--- /dev/null
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -0,0 +1,133 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2007 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20
21"""
22Use this class to fork off a thread to receive event callbacks from the bitbake
23server and queue them for the UI to process. This process must be used to avoid
24client/server deadlocks.
25"""
26
27import bb, socket, threading, pickle
28from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
29
30class BBUIEventQueue:
31 def __init__(self, BBServer, clientinfo=("localhost", 0)):
32
33 self.eventQueue = []
34 self.eventQueueLock = threading.Lock()
35 self.eventQueueNotify = threading.Event()
36
37 self.BBServer = BBServer
38 self.clientinfo = clientinfo
39
40 server = UIXMLRPCServer(self.clientinfo)
41 self.host, self.port = server.socket.getsockname()
42
43 server.register_function( self.system_quit, "event.quit" )
44 server.register_function( self.send_event, "event.sendpickle" )
45 server.socket.settimeout(1)
46
47 self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
48
49 if (self.EventHandle == None):
50 bb.fatal("Could not register UI event handler")
51
52 self.server = server
53
54 self.t = threading.Thread()
55 self.t.setDaemon(True)
56 self.t.run = self.startCallbackHandler
57 self.t.start()
58
59 def getEvent(self):
60
61 self.eventQueueLock.acquire()
62
63 if len(self.eventQueue) == 0:
64 self.eventQueueLock.release()
65 return None
66
67 item = self.eventQueue.pop(0)
68
69 if len(self.eventQueue) == 0:
70 self.eventQueueNotify.clear()
71
72 self.eventQueueLock.release()
73 return item
74
75 def waitEvent(self, delay):
76 self.eventQueueNotify.wait(delay)
77 return self.getEvent()
78
79 def queue_event(self, event):
80 self.eventQueueLock.acquire()
81 self.eventQueue.append(event)
82 self.eventQueueNotify.set()
83 self.eventQueueLock.release()
84
85 def send_event(self, event):
86 self.queue_event(pickle.loads(event))
87
88 def startCallbackHandler(self):
89
90 self.server.timeout = 1
91 while not self.server.quit:
92 self.server.handle_request()
93 self.server.server_close()
94
95 def system_quit( self ):
96 """
97 Shut down the callback thread
98 """
99 try:
100 self.BBServer.unregisterEventHandler(self.EventHandle)
101 except:
102 pass
103 self.server.quit = True
104
105class UIXMLRPCServer (SimpleXMLRPCServer):
106
107 def __init__( self, interface ):
108 self.quit = False
109 SimpleXMLRPCServer.__init__( self,
110 interface,
111 requestHandler=SimpleXMLRPCRequestHandler,
112 logRequests=False, allow_none=True)
113
114 def get_request(self):
115 while not self.quit:
116 try:
117 sock, addr = self.socket.accept()
118 sock.settimeout(1)
119 return (sock, addr)
120 except socket.timeout:
121 pass
122 return (None, None)
123
124 def close_request(self, request):
125 if request is None:
126 return
127 SimpleXMLRPCServer.close_request(self, request)
128
129 def process_request(self, request, client_address):
130 if request is None:
131 return
132 SimpleXMLRPCServer.process_request(self, request, client_address)
133
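
A hedged usage sketch of BBUIEventQueue from a front-end's point of view; server is assumed to be a connection exposing registerEventHandler() as used above, and handle() is a placeholder for UI-specific dispatch:

    import bb.cooker

    queue = BBUIEventQueue(server, clientinfo=("localhost", 0))   # starts the callback thread
    while True:
        event = queue.waitEvent(0.25)        # returns None if nothing arrived within 250 ms
        if event is None:
            continue
        handle(event)                        # placeholder: UI-specific dispatch
        if isinstance(event, bb.cooker.CookerExit):
            queue.system_quit()              # unregister and stop the XML-RPC thread
            break
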
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
new file mode 100644
index 0000000000..a703387fb8
--- /dev/null
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -0,0 +1,100 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2007 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import bb.build, bb.runqueue
21
22class BBUIHelper:
23 def __init__(self):
24 self.needUpdate = False
25 self.running_tasks = {}
26 # Running PIDs preserves the order tasks were executed in
27 self.running_pids = []
28 self.failed_tasks = []
29 self.tasknumber_current = 0
30 self.tasknumber_total = 0
31
32 def eventHandler(self, event):
33 if isinstance(event, bb.build.TaskStarted):
34 self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
35 self.running_pids.append(event.pid)
36 self.needUpdate = True
37 if isinstance(event, bb.build.TaskSucceeded):
38 del self.running_tasks[event.pid]
39 self.running_pids.remove(event.pid)
40 self.needUpdate = True
41 if isinstance(event, bb.build.TaskFailedSilent):
42 del self.running_tasks[event.pid]
43 self.running_pids.remove(event.pid)
44 # Don't add to the failed tasks list since this is e.g. a setscene task failure
45 self.needUpdate = True
46 if isinstance(event, bb.build.TaskFailed):
47 del self.running_tasks[event.pid]
48 self.running_pids.remove(event.pid)
49 self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
50 self.needUpdate = True
51 if isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
52 self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
53 self.tasknumber_total = event.stats.total
54 self.needUpdate = True
55
56 def getTasks(self):
57 self.needUpdate = False
58 return (self.running_tasks, self.failed_tasks)
59
60 def findServerDetails(self):
61 import sys
62 import optparse
63 from bb.server.xmlrpc import BitbakeServerInfo, BitBakeServerConnection
64 host = ""
65 port = 0
66 bind = ""
67 parser = optparse.OptionParser(
68 usage = """%prog -H host -P port -B bindaddr""")
69
70 parser.add_option("-H", "--host", help = "Bitbake server's IP address",
71 action = "store", dest = "host", default = None)
72
73 parser.add_option("-P", "--port", help = "Bitbake server's Port number",
74 action = "store", dest = "port", default = None)
75
76 parser.add_option("-B", "--bind", help = "Hob2 local bind address",
77 action = "store", dest = "bind", default = None)
78
79 options, args = parser.parse_args(sys.argv)
80 for key, val in options.__dict__.items():
81 if key == 'host' and val:
82 host = val
83 elif key == 'port' and val:
84 port = int(val)
85 elif key == 'bind' and val:
86 bind = val
87
88 if not host or not port or not bind:
89 parser.print_usage()
90 sys.exit(1)
91
92 serverinfo = BitbakeServerInfo(host, port)
93 clientinfo = (bind, 0)
94 connection = BitBakeServerConnection(serverinfo, clientinfo)
95
96 server = connection.connection
97 eventHandler = connection.events
98
99 return server, eventHandler, host, bind
100
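
A hedged sketch of how the front-ends above drive BBUIHelper: every event is fed to eventHandler(), and the task view is redrawn only when needUpdate has been set; redraw() is a placeholder for the UI-specific refresh:

    helper = BBUIHelper()
    helper.eventHandler(event)                     # any bb.build / bb.runqueue event
    if helper.needUpdate:
        running, failed = helper.getTasks()        # also clears needUpdate
        redraw(running, failed,                    # placeholder refresh call
               helper.tasknumber_current, helper.tasknumber_total)
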
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
new file mode 100644
index 0000000000..0be45e1af6
--- /dev/null
+++ b/bitbake/lib/bb/utils.py
@@ -0,0 +1,878 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import subprocess
31import glob
32import traceback
33import errno
34from commands import getstatusoutput
35from contextlib import contextmanager
36
37logger = logging.getLogger("BitBake.Util")
38
39def clean_context():
40 return {
41 "os": os,
42 "bb": bb,
43 "time": time,
44 }
45
46def get_context():
47 return _context
48
49
50def set_context(ctx):
51 _context = ctx
52
53# Context used in better_exec, eval
54_context = clean_context()
55
56def explode_version(s):
57 r = []
58 alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
59 numeric_regexp = re.compile('^(\d+)(.*)$')
60 while (s != ''):
61 if s[0] in string.digits:
62 m = numeric_regexp.match(s)
63 r.append((0, int(m.group(1))))
64 s = m.group(2)
65 continue
66 if s[0] in string.letters:
67 m = alpha_regexp.match(s)
68 r.append((1, m.group(1)))
69 s = m.group(2)
70 continue
71 if s[0] == '~':
72 r.append((-1, s[0]))
73 else:
74 r.append((2, s[0]))
75 s = s[1:]
76 return r
77
78def split_version(s):
79 """Split a version string into its constituent parts (PE, PV, PR)"""
80 s = s.strip(" <>=")
81 e = 0
82 if s.count(':'):
83 e = int(s.split(":")[0])
84 s = s.split(":")[1]
85 r = ""
86 if s.count('-'):
87 r = s.rsplit("-", 1)[1]
88 s = s.rsplit("-", 1)[0]
89 v = s
90 return (e, v, r)
91
92def vercmp_part(a, b):
93 va = explode_version(a)
94 vb = explode_version(b)
95 while True:
96 if va == []:
97 (oa, ca) = (0, None)
98 else:
99 (oa, ca) = va.pop(0)
100 if vb == []:
101 (ob, cb) = (0, None)
102 else:
103 (ob, cb) = vb.pop(0)
104 if (oa, ca) == (0, None) and (ob, cb) == (0, None):
105 return 0
106 if oa < ob:
107 return -1
108 elif oa > ob:
109 return 1
110 elif ca < cb:
111 return -1
112 elif ca > cb:
113 return 1
114
115def vercmp(ta, tb):
116 (ea, va, ra) = ta
117 (eb, vb, rb) = tb
118
119 r = int(ea or 0) - int(eb or 0)
120 if (r == 0):
121 r = vercmp_part(va, vb)
122 if (r == 0):
123 r = vercmp_part(ra, rb)
124 return r
125
126def vercmp_string(a, b):
127 ta = split_version(a)
128 tb = split_version(b)
129 return vercmp(ta, tb)
130
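
A hedged usage sketch of the version helpers defined above; the version strings are invented for illustration:

    from bb.utils import split_version, vercmp_string

    print(split_version("2:1.2.3-r4"))         # (2, '1.2.3', 'r4')
    print(vercmp_string("1.2.3", "1.2.10"))    # negative: 1.2.3 sorts before 1.2.10
    print(vercmp_string("1.0-r1", "1.0-r1"))   # 0: identical versions compare equal
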
131def explode_deps(s):
132 """
133 Take an RDEPENDS style string of format:
134 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
135 and return a list of dependencies.
136 Version information is ignored.
137 """
138 r = []
139 l = s.split()
140 flag = False
141 for i in l:
142 if i[0] == '(':
143 flag = True
144 #j = []
145 if not flag:
146 r.append(i)
147 #else:
148 # j.append(i)
149 if flag and i.endswith(')'):
150 flag = False
151 # Ignore version
152 #r[-1] += ' ' + ' '.join(j)
153 return r
154
155def explode_dep_versions2(s):
156 """
157 Take an RDEPENDS style string of format:
158 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
159 and return a dictionary of dependencies and versions.
160 """
161 r = {}
162 l = s.replace(",", "").split()
163 lastdep = None
164 lastcmp = ""
165 lastver = ""
166 incmp = False
167 inversion = False
168 for i in l:
169 if i[0] == '(':
170 incmp = True
171 i = i[1:].strip()
172 if not i:
173 continue
174
175 if incmp:
176 incmp = False
177 inversion = True
178 # This list is based on behavior and supported comparisons from deb, opkg and rpm.
179 #
180 # Even though =<, <<, ==, !=, =>, and >> may not be supported,
181 # we list each possibly valid item.
182 # The build system is responsible for validation of what it supports.
183 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
184 lastcmp = i[0:2]
185 i = i[2:]
186 elif i.startswith(('<', '>', '=')):
187 lastcmp = i[0:1]
188 i = i[1:]
189 else:
190 # This is an unsupported case!
191 lastcmp = (i or "")
192 i = ""
193 i = i.strip()
194 if not i:
195 continue
196
197 if inversion:
198 if i.endswith(')'):
199 i = i[:-1] or ""
200 inversion = False
201 if lastver and i:
202 lastver += " "
203 if i:
204 lastver += i
205 if lastdep not in r:
206 r[lastdep] = []
207 r[lastdep].append(lastcmp + " " + lastver)
208 continue
209
210 #if not inversion:
211 lastdep = i
212 lastver = ""
213 lastcmp = ""
214 if not (i in r and r[i]):
215 r[lastdep] = []
216
217 return r
218
219def explode_dep_versions(s):
220 r = explode_dep_versions2(s)
221 for d in r:
222 if not r[d]:
223 r[d] = None
224 continue
225 if len(r[d]) > 1:
226 bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
227 r[d] = r[d][0]
228 return r
229
230def join_deps(deps, commasep=True):
231 """
232 Take the result from explode_dep_versions and generate a dependency string
233 """
234 result = []
235 for dep in deps:
236 if deps[dep]:
237 if isinstance(deps[dep], list):
238 for v in deps[dep]:
239 result.append(dep + " (" + v + ")")
240 else:
241 result.append(dep + " (" + deps[dep] + ")")
242 else:
243 result.append(dep)
244 if commasep:
245 return ", ".join(result)
246 else:
247 return " ".join(result)
248
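
A hedged usage sketch of the dependency-string helpers above, using an invented RDEPENDS-style value; dictionary key order may vary on Python 2:

    from bb.utils import explode_dep_versions2, join_deps

    deps = explode_dep_versions2("glibc (>= 2.17) dropbear openssl (<< 1.1)")
    # deps == {'glibc': ['>= 2.17'], 'dropbear': [], 'openssl': ['<< 1.1']}
    print(join_deps(deps))
    # e.g. glibc (>= 2.17), dropbear, openssl (<< 1.1)   (order follows dict iteration)
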
249def _print_trace(body, line):
250 """
251 Print the Environment of a Text Body
252 """
253 error = []
254 # print the environment of the method
255 min_line = max(1, line-4)
256 max_line = min(line + 4, len(body))
257 for i in range(min_line, max_line + 1):
258 if line == i:
259 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
260 else:
261 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
262 return error
263
264def better_compile(text, file, realfile, mode = "exec"):
265 """
266 A better compile method. This method
267 will print the offending lines.
268 """
269 try:
270 return compile(text, file, mode)
271 except Exception as e:
272 error = []
273 # split the text into lines again
274 body = text.split('\n')
275 error.append("Error in compiling python function in %s:\n" % realfile)
276 if e.lineno:
277 error.append("The code lines resulting in this error were:")
278 error.extend(_print_trace(body, e.lineno))
279 else:
280 error.append("The function causing this error was:")
281 for line in body:
282 error.append(line)
283 error.append("%s: %s" % (e.__class__.__name__, str(e)))
284
285 logger.error("\n".join(error))
286
287 e = bb.BBHandledException(e)
288 raise e
289
290def _print_exception(t, value, tb, realfile, text, context):
291 error = []
292 try:
293 exception = traceback.format_exception_only(t, value)
294 error.append('Error executing a python function in %s:\n' % realfile)
295
296 # Strip 'us' from the stack (better_exec call)
297 tb = tb.tb_next
298
299 textarray = text.split('\n')
300
301 linefailed = tb.tb_lineno
302
303 tbextract = traceback.extract_tb(tb)
304 tbformat = traceback.format_list(tbextract)
305 error.append("The stack trace of python calls that resulted in this exception/failure was:")
306 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
307 error.extend(_print_trace(textarray, linefailed))
308
309 # See if this is a function we constructed and has calls back into other functions in
310 # "text". If so, try and improve the context of the error by diving down the trace
311 level = 0
312 nexttb = tb.tb_next
313 while nexttb is not None and (level+1) < len(tbextract):
314 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
315 if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
316 # The code was possibly in the string we compiled ourselves
317 error.extend(_print_trace(textarray, tbextract[level+1][1]))
318 elif tbextract[level+1][0].startswith("/"):
319 # The code looks like it might be in a file, try and load it
320 try:
321 with open(tbextract[level+1][0], "r") as f:
322 text = f.readlines()
323 error.extend(_print_trace(text, tbextract[level+1][1]))
324 except:
325 error.append(tbformat[level+1])
326 elif "d" in context and tbextract[level+1][2]:
327 # Try and find the code in the datastore based on the functionname
328 d = context["d"]
329 functionname = tbextract[level+1][2]
330 text = d.getVar(functionname, True)
331 if text:
332 error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
333 else:
334 error.append(tbformat[level+1])
335 else:
336 error.append(tbformat[level+1])
337             nexttb = nexttb.tb_next
338 level = level + 1
339
340 error.append("Exception: %s" % ''.join(exception))
341 finally:
342 logger.error("\n".join(error))
343
344def better_exec(code, context, text = None, realfile = "<code>"):
345 """
346     Similar to better_compile, better_exec will
347 print the lines that are responsible for the
348 error.
349 """
350 import bb.parse
351 if not text:
352 text = code
353 if not hasattr(code, "co_filename"):
354 code = better_compile(code, realfile, realfile)
355 try:
356 exec(code, get_context(), context)
357 except bb.BBHandledException:
358 # Error already shown so passthrough
359 raise
360 except Exception as e:
361 (t, value, tb) = sys.exc_info()
362
363 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
364 raise
365 try:
366 _print_exception(t, value, tb, realfile, text, context)
367 except Exception as e:
368 logger.error("Exception handler error: %s" % str(e))
369
370 e = bb.BBHandledException(e)
371 raise e
372
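# Editor's sketch (not part of the original module): better_exec() compiles and
# runs a code string, leaving any definitions in the supplied context dict.
# The function and file names below are hypothetical.
def _example_better_exec():
    context = {}
    code = "def greet(name):\n    return 'hello ' + name\n"
    better_exec(code, context, code, "example.bb")
    return context["greet"]("world")   # -> 'hello world'
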
373def simple_exec(code, context):
374 exec(code, get_context(), context)
375
376def better_eval(source, locals):
377 return eval(source, get_context(), locals)
378
379@contextmanager
380def fileslocked(files):
381 """Context manager for locking and unlocking file locks."""
382 locks = []
383 if files:
384 for lockfile in files:
385 locks.append(bb.utils.lockfile(lockfile))
386
387 yield
388
389 for lock in locks:
390 bb.utils.unlockfile(lock)
391
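# Editor's sketch (not part of the original module): fileslocked() takes every
# listed lock for the duration of the with-block. The paths are hypothetical.
def _example_fileslocked():
    with fileslocked(["/tmp/example-a.lock", "/tmp/example-b.lock"]):
        pass   # critical section guarded by both locks
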
392def lockfile(name, shared=False, retry=True):
393 """
394     Use the file 'name' as a lock file, returning when the lock has been acquired.
395 Returns a variable to pass to unlockfile().
396 """
397 dirname = os.path.dirname(name)
398 mkdirhier(dirname)
399
400 if not os.access(dirname, os.W_OK):
401 logger.error("Unable to acquire lock '%s', directory is not writable",
402 name)
403 sys.exit(1)
404
405 op = fcntl.LOCK_EX
406 if shared:
407 op = fcntl.LOCK_SH
408 if not retry:
409 op = op | fcntl.LOCK_NB
410
411 while True:
412 # If we leave the lockfiles lying around there is no problem
413 # but we should clean up after ourselves. This gives potential
414 # for races though. To work around this, when we acquire the lock
415 # we check the file we locked was still the lock file on disk.
416         # we check that the file we locked is still the lock file on disk
417         # by comparing inode numbers. If they don't match or the lockfile
418
419 # This implementation is unfair since the last person to request the
420 # lock is the most likely to win it.
421
422 try:
423 lf = open(name, 'a+')
424 fileno = lf.fileno()
425 fcntl.flock(fileno, op)
426 statinfo = os.fstat(fileno)
427 if os.path.exists(lf.name):
428 statinfo2 = os.stat(lf.name)
429 if statinfo.st_ino == statinfo2.st_ino:
430 return lf
431 lf.close()
432 except Exception:
433 try:
434 lf.close()
435 except Exception:
436 pass
437 pass
438 if not retry:
439 return None
440
441def unlockfile(lf):
442 """
443 Unlock a file locked using lockfile()
444 """
445 try:
446 # If we had a shared lock, we need to promote to exclusive before
447 # removing the lockfile. Attempt this, ignore failures.
448 fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
449 os.unlink(lf.name)
450 except (IOError, OSError):
451 pass
452 fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
453 lf.close()
454
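# Editor's sketch (not part of the original module): the usual
# lockfile()/unlockfile() pairing. The lock path is hypothetical.
def _example_lockfile():
    lock = lockfile("/tmp/example-build.lock")       # blocks until acquired
    try:
        pass   # exclusive critical section
    finally:
        unlockfile(lock)
    # With retry=False the call does not block; None means the lock is busy.
    maybe = lockfile("/tmp/example-build.lock", retry=False)
    if maybe:
        unlockfile(maybe)
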
455def md5_file(filename):
456 """
457 Return the hex string representation of the MD5 checksum of filename.
458 """
459 try:
460 import hashlib
461 m = hashlib.md5()
462 except ImportError:
463 import md5
464 m = md5.new()
465
466 with open(filename, "rb") as f:
467 for line in f:
468 m.update(line)
469 return m.hexdigest()
470
471def sha256_file(filename):
472 """
473 Return the hex string representation of the 256-bit SHA checksum of
474 filename. On Python 2.4 this will return None, so callers will need to
475 handle that by either skipping SHA checks, or running a standalone sha256sum
476 binary.
477 """
478 try:
479 import hashlib
480 except ImportError:
481 return None
482
483 s = hashlib.sha256()
484 with open(filename, "rb") as f:
485 for line in f:
486 s.update(line)
487 return s.hexdigest()
488
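# Editor's sketch (not part of the original module): the two checksum helpers
# share the same calling convention, but only sha256_file() can return None
# (when hashlib is unavailable, e.g. Python 2.4).
def _example_checksums(filename):
    md5sum = md5_file(filename)
    sha256sum = sha256_file(filename)
    if sha256sum is None:
        pass   # fall back to skipping the SHA check or shelling out to sha256sum
    return md5sum, sha256sum
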
489def preserved_envvars_exported():
490     """Variables which are taken from the environment, placed in the metadata
491     and exported from it"""
492 return [
493 'BB_TASKHASH',
494 'HOME',
495 'LOGNAME',
496 'PATH',
497 'PWD',
498 'SHELL',
499 'TERM',
500 'USER',
501 ]
502
503def preserved_envvars():
504 """Variables which are taken from the environment and placed in the metadata"""
505 v = [
506 'BBPATH',
507 'BB_PRESERVE_ENV',
508 'BB_ENV_WHITELIST',
509 'BB_ENV_EXTRAWHITE',
510 ]
511 return v + preserved_envvars_exported()
512
513def filter_environment(good_vars):
514 """
515 Create a pristine environment for bitbake. This will remove variables that
516     are not on the whitelist and might adversely influence the build.
517 """
518
519 removed_vars = {}
520 for key in os.environ.keys():
521 if key in good_vars:
522 continue
523
524 removed_vars[key] = os.environ[key]
525 os.unsetenv(key)
526 del os.environ[key]
527
528 if len(removed_vars):
529 logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
530
531 return removed_vars
532
533def approved_variables():
534 """
535 Determine and return the list of whitelisted variables which are approved
536     to remain in the environment.
537 """
538 if 'BB_PRESERVE_ENV' in os.environ:
539 return os.environ.keys()
540 approved = []
541 if 'BB_ENV_WHITELIST' in os.environ:
542 approved = os.environ['BB_ENV_WHITELIST'].split()
543 approved.extend(['BB_ENV_WHITELIST'])
544 else:
545 approved = preserved_envvars()
546 if 'BB_ENV_EXTRAWHITE' in os.environ:
547 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
548 if 'BB_ENV_EXTRAWHITE' not in approved:
549 approved.extend(['BB_ENV_EXTRAWHITE'])
550 return approved
551
552def clean_environment():
553 """
554 Clean up any spurious environment variables. This will remove any
555 variables the user hasn't chosen to preserve.
556 """
557 if 'BB_PRESERVE_ENV' not in os.environ:
558 good_vars = approved_variables()
559 return filter_environment(good_vars)
560
561 return {}
562
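# Editor's sketch (not part of the original module): the save/restore pattern
# clean_environment() is designed for. The extra whitelisted variable name is
# only illustrative.
def _example_clean_environment():
    os.environ["BB_ENV_EXTRAWHITE"] = "MY_MIRROR_URL"
    removed = clean_environment()     # strips everything not whitelisted
    try:
        pass   # parse/build with the pristine environment
    finally:
        os.environ.update(removed)    # put the stripped variables back
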
563def empty_environment():
564 """
565 Remove all variables from the environment.
566 """
567 for s in os.environ.keys():
568 os.unsetenv(s)
569 del os.environ[s]
570
571def build_environment(d):
572 """
573 Build an environment from all exported variables.
574 """
575 import bb.data
576 for var in bb.data.keys(d):
577 export = d.getVarFlag(var, "export")
578 if export:
579 os.environ[var] = d.getVar(var, True) or ""
580
581def remove(path, recurse=False):
582 """Equivalent to rm -f or rm -rf"""
583 if not path:
584 return
585 if recurse:
586         # shutil.rmtree(path) would be ideal but it's too slow
587 subprocess.call(['rm', '-rf'] + glob.glob(path))
588 return
589 for name in glob.glob(path):
590 try:
591 os.unlink(name)
592 except OSError as exc:
593 if exc.errno != errno.ENOENT:
594 raise
595
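# Editor's sketch (not part of the original module): remove() accepts globs and
# behaves like rm -f, or rm -rf with recurse=True. The paths are hypothetical.
def _example_remove():
    remove("/tmp/example-workdir/*.o")             # rm -f of a glob, ENOENT ignored
    remove("/tmp/example-workdir", recurse=True)   # rm -rf
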
596def prunedir(topdir):
597 # Delete everything reachable from the directory named in 'topdir'.
598 # CAUTION: This is dangerous!
599 for root, dirs, files in os.walk(topdir, topdown = False):
600 for name in files:
601 os.remove(os.path.join(root, name))
602 for name in dirs:
603 if os.path.islink(os.path.join(root, name)):
604 os.remove(os.path.join(root, name))
605 else:
606 os.rmdir(os.path.join(root, name))
607 os.rmdir(topdir)
608
609#
610# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
611 # but that's possibly insane and suffixes is probably going to be small
612#
613def prune_suffix(var, suffixes, d):
614 # See if var ends with any of the suffixes listed and
615 # remove it if found
616 for suffix in suffixes:
617 if var.endswith(suffix):
618 return var.replace(suffix, "")
619 return var
620
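# Editor's sketch (not part of the original module): prune_suffix() strips the
# first matching suffix; the datastore argument is unused here, so None suffices.
def _example_prune_suffix():
    return prune_suffix("busybox-native", ["-native", "-cross"], None)   # -> 'busybox'
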
621def mkdirhier(directory):
622     """Create a directory like 'mkdir -p', but do not complain if the
623     directory already exists (unlike os.makedirs)
624 """
625
626 try:
627 os.makedirs(directory)
628 except OSError as e:
629 if e.errno != errno.EEXIST:
630 raise e
631
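# Editor's sketch (not part of the original module): mkdirhier() is a tolerant
# 'mkdir -p'; calling it twice on the same path is harmless. Path is hypothetical.
def _example_mkdirhier():
    mkdirhier("/tmp/example/deeply/nested/dir")
    mkdirhier("/tmp/example/deeply/nested/dir")   # no EEXIST error the second time
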
632def movefile(src, dest, newmtime = None, sstat = None):
633 """Moves a file from src to dest, preserving all permissions and
634 attributes; mtime will be preserved even when moving across
635     filesystems. Returns a true value (the preserved mtime) on success and
636     None on failure. The move is atomic.
637 """
638
639 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
640 try:
641 if not sstat:
642 sstat = os.lstat(src)
643 except Exception as e:
644 print("movefile: Stating source file failed...", e)
645 return None
646
647 destexists = 1
648 try:
649 dstat = os.lstat(dest)
650 except:
651 dstat = os.lstat(os.path.dirname(dest))
652 destexists = 0
653
654 if destexists:
655 if stat.S_ISLNK(dstat[stat.ST_MODE]):
656 try:
657 os.unlink(dest)
658 destexists = 0
659 except Exception as e:
660 pass
661
662 if stat.S_ISLNK(sstat[stat.ST_MODE]):
663 try:
664 target = os.readlink(src)
665 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
666 os.unlink(dest)
667 os.symlink(target, dest)
668 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
669 os.unlink(src)
670 return os.lstat(dest)
671 except Exception as e:
672 print("movefile: failed to properly create symlink:", dest, "->", target, e)
673 return None
674
675 renamefailed = 1
676 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
677 try:
678 os.rename(src, dest)
679 renamefailed = 0
680 except Exception as e:
681 if e[0] != errno.EXDEV:
682 # Some random error.
683 print("movefile: Failed to move", src, "to", dest, e)
684 return None
685             # Invalid cross-device link: the path is 'bind' mounted or genuinely on another device
686
687 if renamefailed:
688 didcopy = 0
689 if stat.S_ISREG(sstat[stat.ST_MODE]):
690 try: # For safety copy then move it over.
691 shutil.copyfile(src, dest + "#new")
692 os.rename(dest + "#new", dest)
693 didcopy = 1
694 except Exception as e:
695 print('movefile: copy', src, '->', dest, 'failed.', e)
696 return None
697 else:
698             # we don't yet handle special files, so we need to fall back to /bin/mv
699 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
700 if a[0] != 0:
701 print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
702 return None # failure
703 try:
704 if didcopy:
705 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
706 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
707 os.unlink(src)
708 except Exception as e:
709 print("movefile: Failed to chown/chmod/unlink", dest, e)
710 return None
711
712 if newmtime:
713 os.utime(dest, (newmtime, newmtime))
714 else:
715 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
716 newmtime = sstat[stat.ST_MTIME]
717 return newmtime
718
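# Editor's sketch (not part of the original module): movefile() preserves mtime
# and permissions and returns None on failure. Temporary paths are created here
# so the example is self-contained.
def _example_movefile():
    import tempfile
    workdir = tempfile.mkdtemp()
    src = os.path.join(workdir, "a.txt")
    open(src, "w").close()
    newmtime = movefile(src, os.path.join(workdir, "b.txt"))
    return newmtime   # None would indicate failure; otherwise the preserved mtime
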
719def copyfile(src, dest, newmtime = None, sstat = None):
720 """
721 Copies a file from src to dest, preserving all permissions and
722     attributes; mtime will be preserved even when copying across
723     filesystems. Returns a true value on success and False on failure.
724 """
725 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
726 try:
727 if not sstat:
728 sstat = os.lstat(src)
729 except Exception as e:
730 logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
731 return False
732
733 destexists = 1
734 try:
735 dstat = os.lstat(dest)
736 except:
737 dstat = os.lstat(os.path.dirname(dest))
738 destexists = 0
739
740 if destexists:
741 if stat.S_ISLNK(dstat[stat.ST_MODE]):
742 try:
743 os.unlink(dest)
744 destexists = 0
745 except Exception as e:
746 pass
747
748 if stat.S_ISLNK(sstat[stat.ST_MODE]):
749 try:
750 target = os.readlink(src)
751 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
752 os.unlink(dest)
753 os.symlink(target, dest)
754 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
755 return os.lstat(dest)
756 except Exception as e:
757 logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
758 return False
759
760 if stat.S_ISREG(sstat[stat.ST_MODE]):
761 try:
762 srcchown = False
763 if not os.access(src, os.R_OK):
764 # Make sure we can read it
765 srcchown = True
766 os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
767
768 # For safety copy then move it over.
769 shutil.copyfile(src, dest + "#new")
770 os.rename(dest + "#new", dest)
771 except Exception as e:
772 logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
773 return False
774 finally:
775 if srcchown:
776 os.chmod(src, sstat[stat.ST_MODE])
777 os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
778
779 else:
780         # we don't yet handle special files, so we need to fall back to /bin/cp
781 a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
782 if a[0] != 0:
783 logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
784 return False # failure
785 try:
786 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
787 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
788 except Exception as e:
789 logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
790 return False
791
792 if newmtime:
793 os.utime(dest, (newmtime, newmtime))
794 else:
795 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
796 newmtime = sstat[stat.ST_MTIME]
797 return newmtime
798
799def which(path, item, direction = 0, history = False):
800 """
801     Locate 'item' in the colon-separated list of paths in 'path'
802 """
803
804 hist = []
805 paths = (path or "").split(':')
806 if direction != 0:
807 paths.reverse()
808
809 for p in paths:
810 next = os.path.join(p, item)
811 hist.append(next)
812 if os.path.exists(next):
813 if not os.path.isabs(next):
814 next = os.path.abspath(next)
815 if history:
816 return next, hist
817 return next
818
819 if history:
820 return "", hist
821 return ""
822
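# Editor's sketch (not part of the original module): which() walks a
# colon-separated path list; with history=True it also returns every candidate
# it tried, which is useful for error messages.
def _example_which():
    found = which(os.environ.get("PATH", ""), "sh")
    found, attempts = which(os.environ.get("PATH", ""), "sh", history=True)
    return found, attempts   # found is "" when nothing matched
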
823def to_boolean(string, default=None):
824 if not string:
825 return default
826
827 normalized = string.lower()
828 if normalized in ("y", "yes", "1", "true"):
829 return True
830 elif normalized in ("n", "no", "0", "false"):
831 return False
832 else:
833 raise ValueError("Invalid value for to_boolean: %s" % string)
834
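# Editor's sketch (not part of the original module): to_boolean() accepts the
# usual yes/no spellings and falls back to 'default' for empty input.
def _example_to_boolean():
    assert to_boolean("Yes") is True
    assert to_boolean("0") is False
    assert to_boolean(None, default=True) is True
    # Anything else, e.g. to_boolean("maybe"), raises ValueError.
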
835def contains(variable, checkvalues, truevalue, falsevalue, d):
836 val = d.getVar(variable, True)
837 if not val:
838 return falsevalue
839 val = set(val.split())
840 if isinstance(checkvalues, basestring):
841 checkvalues = set(checkvalues.split())
842 else:
843 checkvalues = set(checkvalues)
844 if checkvalues.issubset(val):
845 return truevalue
846 return falsevalue
847
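# Editor's sketch (not part of the original module): contains() checks whether
# every word in checkvalues appears in the space-separated variable. The
# datastore 'd' and the DISTRO_FEATURES values used here are assumptions.
def _example_contains(d):
    # Returns "gui" only if DISTRO_FEATURES contains both "x11" and "opengl".
    return contains("DISTRO_FEATURES", "x11 opengl", "gui", "headless", d)
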
848def cpu_count():
849 return multiprocessing.cpu_count()
850
851def nonblockingfd(fd):
852 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
853
854def process_profilelog(fn):
855 # Redirect stdout to capture profile information
856 pout = open(fn + '.processed', 'w')
857 so = sys.stdout.fileno()
858 orig_so = os.dup(sys.stdout.fileno())
859 os.dup2(pout.fileno(), so)
860
861 import pstats
862 p = pstats.Stats(fn)
863 p.sort_stats('time')
864 p.print_stats()
865 p.print_callers()
866 p.sort_stats('cumulative')
867 p.print_stats()
868
869 os.dup2(orig_so, so)
870 pout.flush()
871 pout.close()
872
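# Editor's sketch (not part of the original module): process_profilelog()
# expects a file written by the profile/cProfile machinery and produces a
# '<name>.processed' text report next to it. Paths are hypothetical.
def _example_process_profilelog():
    import cProfile
    prof = cProfile.Profile()
    prof.runcall(sorted, range(1000))
    prof.dump_stats("/tmp/example.profile")
    process_profilelog("/tmp/example.profile")   # writes /tmp/example.profile.processed
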
873#
874 # Originally added to work around multiprocessing pool bugs in python < 2.7.3; now just a thin wrapper
875#
876def multiprocessingpool(*args, **kwargs):
877 return multiprocessing.Pool(*args, **kwargs)
878