path: root/bitbake/lib/bb
author     Tudor Florea <tudor.florea@enea.com>   2015-10-09 22:59:03 +0200
committer  Tudor Florea <tudor.florea@enea.com>   2015-10-09 22:59:03 +0200
commit     972dcfcdbfe75dcfeb777150c136576cf1a71e99 (patch)
tree       97a61cd7e293d7ae9d56ef7ed0f81253365bb026 /bitbake/lib/bb
download   poky-972dcfcdbfe75dcfeb777150c136576cf1a71e99.tar.gz
initial commit for Enea Linux 5.0 arm
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/COW.py  323
-rw-r--r--  bitbake/lib/bb/__init__.py  142
-rw-r--r--  bitbake/lib/bb/build.py  711
-rw-r--r--  bitbake/lib/bb/cache.py  837
-rw-r--r--  bitbake/lib/bb/cache_extra.py  75
-rw-r--r--  bitbake/lib/bb/checksum.py  90
-rw-r--r--  bitbake/lib/bb/codeparser.py  406
-rw-r--r--  bitbake/lib/bb/command.py  451
-rw-r--r--  bitbake/lib/bb/compat.py  6
-rw-r--r--  bitbake/lib/bb/cooker.py  2025
-rw-r--r--  bitbake/lib/bb/cookerdata.py  320
-rw-r--r--  bitbake/lib/bb/daemonize.py  193
-rw-r--r--  bitbake/lib/bb/data.py  446
-rw-r--r--  bitbake/lib/bb/data_smart.py  811
-rw-r--r--  bitbake/lib/bb/event.py  639
-rw-r--r--  bitbake/lib/bb/exceptions.py  91
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  1585
-rw-r--r--  bitbake/lib/bb/fetch2/bzr.py  143
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py  263
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py  171
-rw-r--r--  bitbake/lib/bb/fetch2/git.py  358
-rw-r--r--  bitbake/lib/bb/fetch2/gitannex.py  76
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py  136
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py  193
-rw-r--r--  bitbake/lib/bb/fetch2/local.py  128
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py  135
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py  187
-rw-r--r--  bitbake/lib/bb/fetch2/repo.py  98
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py  129
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py  127
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py  192
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py  106
-rw-r--r--  bitbake/lib/bb/methodpool.py  29
-rw-r--r--  bitbake/lib/bb/monitordisk.py  263
-rw-r--r--  bitbake/lib/bb/msg.py  196
-rw-r--r--  bitbake/lib/bb/namedtuple_with_abc.py  255
-rw-r--r--  bitbake/lib/bb/parse/__init__.py  164
-rw-r--r--  bitbake/lib/bb/parse/ast.py  482
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py  261
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py  189
-rw-r--r--  bitbake/lib/bb/parse/parse_py/__init__.py  33
-rw-r--r--  bitbake/lib/bb/persist_data.py  217
-rw-r--r--  bitbake/lib/bb/process.py  133
-rw-r--r--  bitbake/lib/bb/providers.py  381
-rw-r--r--  bitbake/lib/bb/pysh/__init__.py  0
-rw-r--r--  bitbake/lib/bb/pysh/builtin.py  710
-rw-r--r--  bitbake/lib/bb/pysh/interp.py  1367
-rw-r--r--  bitbake/lib/bb/pysh/lsprof.py  116
-rw-r--r--  bitbake/lib/bb/pysh/pysh.py  167
-rw-r--r--  bitbake/lib/bb/pysh/pyshlex.py  888
-rw-r--r--  bitbake/lib/bb/pysh/pyshyacc.py  779
-rw-r--r--  bitbake/lib/bb/pysh/sherrors.py  41
-rw-r--r--  bitbake/lib/bb/pysh/subprocess_fix.py  77
-rw-r--r--  bitbake/lib/bb/runqueue.py  2172
-rw-r--r--  bitbake/lib/bb/server/__init__.py  96
-rw-r--r--  bitbake/lib/bb/server/process.py  252
-rw-r--r--  bitbake/lib/bb/server/xmlrpc.py  383
-rw-r--r--  bitbake/lib/bb/shell.py  820
-rw-r--r--  bitbake/lib/bb/siggen.py  486
-rw-r--r--  bitbake/lib/bb/taskdata.py  651
-rw-r--r--  bitbake/lib/bb/tests/__init__.py  0
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py  375
-rw-r--r--  bitbake/lib/bb/tests/cow.py  136
-rw-r--r--  bitbake/lib/bb/tests/data.py  351
-rw-r--r--  bitbake/lib/bb/tests/fetch.py  569
-rw-r--r--  bitbake/lib/bb/tests/utils.py  103
-rw-r--r--  bitbake/lib/bb/tinfoil.py  99
-rw-r--r--  bitbake/lib/bb/ui/__init__.py  17
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py  1023
-rw-r--r--  bitbake/lib/bb/ui/crumbs/__init__.py  17
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/builddetailspage.py  437
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/builder.py  1475
-rw-r--r--  bitbake/lib/bb/ui/crumbs/buildmanager.py  455
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/__init__.py  0
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py  341
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py  44
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py  70
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py  219
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py  172
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py  298
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py  163
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/propertydialog.py  437
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py  90
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py  51
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py  159
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py  122
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py  891
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hobcolor.py  38
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hobeventhandler.py  639
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hoblistmodel.py  903
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/hobpages.py  128
-rw-r--r--  bitbake/lib/bb/ui/crumbs/hobwidget.py  904
-rw-r--r--  bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py  561
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/imagedetailspage.py  669
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/packageselectionpage.py  355
-rw-r--r--  bitbake/lib/bb/ui/crumbs/persistenttooltip.py  186
-rw-r--r--  bitbake/lib/bb/ui/crumbs/progress.py  23
-rw-r--r--  bitbake/lib/bb/ui/crumbs/progressbar.py  59
-rw-r--r--  bitbake/lib/bb/ui/crumbs/puccho.glade  606
-rwxr-xr-x  bitbake/lib/bb/ui/crumbs/recipeselectionpage.py  335
-rw-r--r--  bitbake/lib/bb/ui/crumbs/runningbuild.py  551
-rw-r--r--  bitbake/lib/bb/ui/crumbs/sanitycheckpage.py  85
-rw-r--r--  bitbake/lib/bb/ui/crumbs/utils.py  34
-rw-r--r--  bitbake/lib/bb/ui/depexp.py  326
-rw-r--r--  bitbake/lib/bb/ui/goggle.py  121
-rwxr-xr-x  bitbake/lib/bb/ui/hob.py  109
-rw-r--r--  bitbake/lib/bb/ui/icons/images/images_display.png  bin 0 -> 6898 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/images/images_hover.png  bin 0 -> 7051 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/add-hover.png  bin 0 -> 1212 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/add.png  bin 0 -> 1176 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/alert.png  bin 0 -> 3954 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/confirmation.png  bin 0 -> 5789 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/denied.png  bin 0 -> 3955 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/error.png  bin 0 -> 6482 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/info.png  bin 0 -> 3311 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/issues.png  bin 0 -> 4549 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/refresh.png  bin 0 -> 5250 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/remove-hover.png  bin 0 -> 2809 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/remove.png  bin 0 -> 1971 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/indicators/tick.png  bin 0 -> 4563 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/info/info_display.png  bin 0 -> 4117 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/info/info_hover.png  bin 0 -> 4167 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/layers/layers_display.png  bin 0 -> 4840 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/layers/layers_hover.png  bin 0 -> 5257 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/packages/packages_display.png  bin 0 -> 7011 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/packages/packages_hover.png  bin 0 -> 7121 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/recipe/recipe_display.png  bin 0 -> 4723 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/recipe/recipe_hover.png  bin 0 -> 4866 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/settings/settings_display.png  bin 0 -> 6076 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/settings/settings_hover.png  bin 0 -> 6269 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/templates/templates_display.png  bin 0 -> 5651 bytes
-rw-r--r--  bitbake/lib/bb/ui/icons/templates/templates_hover.png  bin 0 -> 5791 bytes
-rw-r--r--  bitbake/lib/bb/ui/knotty.py  559
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py  373
-rw-r--r--  bitbake/lib/bb/ui/puccho.py  425
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py  312
-rw-r--r--  bitbake/lib/bb/ui/uievent.py  150
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py  100
-rw-r--r--  bitbake/lib/bb/utils.py  916
139 files changed, 40782 insertions, 0 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
new file mode 100644
index 0000000000..6917ec378a
--- /dev/null
+++ b/bitbake/lib/bb/COW.py
@@ -0,0 +1,323 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
5#
6# Copyright (C) 2006 Tim Amsell
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21# Please note:
22# Be careful when using mutable types (i.e. Dicts and Lists) - operations involving these are SLOW.
23# Assign a file to __warn__ to get warnings about slow operations.
24#
25
26from __future__ import print_function
27import copy
28import types
29ImmutableTypes = (
30 types.NoneType,
31 bool,
32 complex,
33 float,
34 int,
35 long,
36 tuple,
37 frozenset,
38 basestring
39)
40
41MUTABLE = "__mutable__"
42
43class COWMeta(type):
44 pass
45
46class COWDictMeta(COWMeta):
47 __warn__ = False
48 __hasmutable__ = False
49 __marker__ = tuple()
50
51 def __str__(cls):
52 # FIXME: I have magic numbers!
53 return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
54 __repr__ = __str__
55
56 def cow(cls):
57 class C(cls):
58 __count__ = cls.__count__ + 1
59 return C
60 copy = cow
61 __call__ = cow
62
63 def __setitem__(cls, key, value):
64 if not isinstance(value, ImmutableTypes):
65 if not isinstance(value, COWMeta):
66 cls.__hasmutable__ = True
67 key += MUTABLE
68 setattr(cls, key, value)
69
70 def __getmutable__(cls, key, readonly=False):
71 nkey = key + MUTABLE
72 try:
73 return cls.__dict__[nkey]
74 except KeyError:
75 pass
76
77 value = getattr(cls, nkey)
78 if readonly:
79 return value
80
81 if not cls.__warn__ is False and not isinstance(value, COWMeta):
82 print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
83 try:
84 value = value.copy()
85 except AttributeError as e:
86 value = copy.copy(value)
87 setattr(cls, nkey, value)
88 return value
89
90 __getmarker__ = []
91 def __getreadonly__(cls, key, default=__getmarker__):
92 """\
93 Get a value (even if mutable) which you promise not to change.
94 """
95 return cls.__getitem__(key, default, True)
96
97 def __getitem__(cls, key, default=__getmarker__, readonly=False):
98 try:
99 try:
100 value = getattr(cls, key)
101 except AttributeError:
102 value = cls.__getmutable__(key, readonly)
103
104 # This is for values which have been deleted
105 if value is cls.__marker__:
106 raise AttributeError("key %s does not exist." % key)
107
108 return value
109 except AttributeError as e:
110 if not default is cls.__getmarker__:
111 return default
112
113 raise KeyError(str(e))
114
115 def __delitem__(cls, key):
116 cls.__setitem__(key, cls.__marker__)
117
118 def __revertitem__(cls, key):
119 if not cls.__dict__.has_key(key):
120 key += MUTABLE
121 delattr(cls, key)
122
123 def __contains__(cls, key):
124 return cls.has_key(key)
125
126 def has_key(cls, key):
127 value = cls.__getreadonly__(key, cls.__marker__)
128 if value is cls.__marker__:
129 return False
130 return True
131
132 def iter(cls, type, readonly=False):
133 for key in dir(cls):
134 if key.startswith("__"):
135 continue
136
137 if key.endswith(MUTABLE):
138 key = key[:-len(MUTABLE)]
139
140 if type == "keys":
141 yield key
142
143 try:
144 if readonly:
145 value = cls.__getreadonly__(key)
146 else:
147 value = cls[key]
148 except KeyError:
149 continue
150
151 if type == "values":
152 yield value
153 if type == "items":
154 yield (key, value)
155 raise StopIteration()
156
157 def iterkeys(cls):
158 return cls.iter("keys")
159 def itervalues(cls, readonly=False):
160 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
161 print("Warning: If you aren't going to change any of the values, call with True.", file=cls.__warn__)
162 return cls.iter("values", readonly)
163 def iteritems(cls, readonly=False):
164 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
165 print("Warning: If you aren't going to change any of the values, call with True.", file=cls.__warn__)
166 return cls.iter("items", readonly)
167
168class COWSetMeta(COWDictMeta):
169 def __str__(cls):
170 # FIXME: I have magic numbers!
171 return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
172 __repr__ = __str__
173
174 def cow(cls):
175 class C(cls):
176 __count__ = cls.__count__ + 1
177 return C
178
179 def add(cls, value):
180 COWDictMeta.__setitem__(cls, repr(hash(value)), value)
181
182 def remove(cls, value):
183 COWDictMeta.__delitem__(cls, repr(hash(value)))
184
185 def __in__(cls, value):
186 return COWDictMeta.has_key(cls, repr(hash(value)))
187
188 def iterkeys(cls):
189 raise TypeError("sets don't have keys")
190
191 def iteritems(cls):
192 raise TypeError("sets don't have 'items'")
193
194# These are the actual classes you use!
195class COWDictBase(object):
196 __metaclass__ = COWDictMeta
197 __count__ = 0
198
199class COWSetBase(object):
200 __metaclass__ = COWSetMeta
201 __count__ = 0
202
203if __name__ == "__main__":
204 import sys
205 COWDictBase.__warn__ = sys.stderr
206 a = COWDictBase()
207 print("a", a)
208
209 a['a'] = 'a'
210 a['b'] = 'b'
211 a['dict'] = {}
212
213 b = a.copy()
214 print("b", b)
215 b['c'] = 'b'
216
217 print()
218
219 print("a", a)
220 for x in a.iteritems():
221 print(x)
222 print("--")
223 print("b", b)
224 for x in b.iteritems():
225 print(x)
226 print()
227
228 b['dict']['a'] = 'b'
229 b['a'] = 'c'
230
231 print("a", a)
232 for x in a.iteritems():
233 print(x)
234 print("--")
235 print("b", b)
236 for x in b.iteritems():
237 print(x)
238 print()
239
240 try:
241 b['dict2']
242 except KeyError as e:
243 print("Okay!")
244
245 a['set'] = COWSetBase()
246 a['set'].add("o1")
247 a['set'].add("o1")
248 a['set'].add("o2")
249
250 print("a", a)
251 for x in a['set'].itervalues():
252 print(x)
253 print("--")
254 print("b", b)
255 for x in b['set'].itervalues():
256 print(x)
257 print()
258
259 b['set'].add('o3')
260
261 print("a", a)
262 for x in a['set'].itervalues():
263 print(x)
264 print("--")
265 print("b", b)
266 for x in b['set'].itervalues():
267 print(x)
268 print()
269
270 a['set2'] = set()
271 a['set2'].add("o1")
272 a['set2'].add("o1")
273 a['set2'].add("o2")
274
275 print("a", a)
276 for x in a.iteritems():
277 print(x)
278 print("--")
279 print("b", b)
280 for x in b.iteritems(readonly=True):
281 print(x)
282 print()
283
284 del b['b']
285 try:
286 print(b['b'])
287 except KeyError:
288 print("Yay! deleted key raises error")
289
290 if b.has_key('b'):
291 print("Boo!")
292 else:
293 print("Yay - has_key with delete works!")
294
295 print("a", a)
296 for x in a.iteritems():
297 print(x)
298 print("--")
299 print("b", b)
300 for x in b.iteritems(readonly=True):
301 print(x)
302 print()
303
304 b.__revertitem__('b')
305
306 print("a", a)
307 for x in a.iteritems():
308 print(x)
309 print("--")
310 print("b", b)
311 for x in b.iteritems(readonly=True):
312 print(x)
313 print()
314
315 b.__revertitem__('dict')
316 print("a", a)
317 for x in a.iteritems():
318 print(x)
319 print("--")
320 print("b", b)
321 for x in b.iteritems(readonly=True):
322 print(x)
323 print()
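
The demo above exercises the key property of COW.py: copy() produces a subclass whose class dictionary shadows the parent's, so reads fall through until a key is written, and mutable values are only duplicated on first writable access. Below is a minimal standalone sketch of that share-until-write behaviour; it assumes a Python 2 interpreter and a poky checkout with bitbake/lib on sys.path.

# Copy-on-write behaviour of COWDictBase (sketch; Python 2, bitbake/lib on sys.path).
import sys
sys.path.insert(0, "bitbake/lib")   # assumption: run from the top of a poky checkout
from bb.COW import COWDictBase

base = COWDictBase()                # level 1 "dictionary" (really a generated class)
base['answer'] = 42                 # immutable value, stored directly as a class attribute
base['prefs'] = {'color': 'blue'}   # mutable value, stored under the __mutable__ suffix

child = base.copy()                 # level 2: shares everything with base for now
print(child['answer'])              # 42 - the read falls through to the parent class
child['answer'] = 43                # the write shadows the key on the child only
print(base['answer'])               # still 42

child['prefs']['color'] = 'red'     # first writable access copies the dict into child
print(base['prefs']['color'])       # still 'blue'; the parent's dict is untouched
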
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
new file mode 100644
index 0000000000..4d69552c44
--- /dev/null
+++ b/bitbake/lib/bb/__init__.py
@@ -0,0 +1,142 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Build System Python Library
5#
6# Copyright (C) 2003 Holger Schurig
7# Copyright (C) 2003, 2004 Chris Larson
8#
9# Based on Gentoo's portage.py.
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24__version__ = "1.24.0"
25
26import sys
27if sys.version_info < (2, 7, 3):
28 raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
29
30
31class BBHandledException(Exception):
32 """
33 The big dilemma for generic bitbake code is what information to give the user
34 when an exception occurs. Any exception inheriting this base exception class
35 has already provided information to the user via some 'fired' message type such as
36 an explicitly fired event using bb.fire, or a bb.error message. If bitbake
37 encounters an exception derived from this class, no backtrace or other information
38 will be given to the user; it's assumed the earlier event provided the relevant information.
39 """
40 pass
41
42import os
43import logging
44
45
46class NullHandler(logging.Handler):
47 def emit(self, record):
48 pass
49
50Logger = logging.getLoggerClass()
51class BBLogger(Logger):
52 def __init__(self, name):
53 if name.split(".")[0] == "BitBake":
54 self.debug = self.bbdebug
55 Logger.__init__(self, name)
56
57 def bbdebug(self, level, msg, *args, **kwargs):
58 return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
59
60 def plain(self, msg, *args, **kwargs):
61 return self.log(logging.INFO + 1, msg, *args, **kwargs)
62
63 def verbose(self, msg, *args, **kwargs):
64 return self.log(logging.INFO - 1, msg, *args, **kwargs)
65
66logging.raiseExceptions = False
67logging.setLoggerClass(BBLogger)
68
69logger = logging.getLogger("BitBake")
70logger.addHandler(NullHandler())
71logger.setLevel(logging.DEBUG - 2)
72
73# This has to be imported after the setLoggerClass, as the import of bb.msg
74# can result in construction of the various loggers.
75import bb.msg
76
77from bb import fetch2 as fetch
78sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
79
80# Messaging convenience functions
81def plain(*args):
82 logger.plain(''.join(args))
83
84def debug(lvl, *args):
85 if isinstance(lvl, basestring):
86 logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
87 args = (lvl,) + args
88 lvl = 1
89 logger.debug(lvl, ''.join(args))
90
91def note(*args):
92 logger.info(''.join(args))
93
94def warn(*args):
95 logger.warn(''.join(args))
96
97def error(*args):
98 logger.error(''.join(args))
99
100def fatal(*args):
101 logger.critical(''.join(args))
102 raise BBHandledException()
103
104def deprecated(func, name=None, advice=""):
105 """This is a decorator which can be used to mark functions
106 as deprecated. It will result in a warning being emitted
107 when the function is used."""
108 import warnings
109
110 if advice:
111 advice = ": %s" % advice
112 if name is None:
113 name = func.__name__
114
115 def newFunc(*args, **kwargs):
116 warnings.warn("Call to deprecated function %s%s." % (name,
117 advice),
118 category=DeprecationWarning,
119 stacklevel=2)
120 return func(*args, **kwargs)
121 newFunc.__name__ = func.__name__
122 newFunc.__doc__ = func.__doc__
123 newFunc.__dict__.update(func.__dict__)
124 return newFunc
125
126# For compatibility
127def deprecate_import(current, modulename, fromlist, renames = None):
128 """Import objects from one module into another, wrapping them with a DeprecationWarning"""
129 import sys
130
131 module = __import__(modulename, fromlist = fromlist)
132 for position, objname in enumerate(fromlist):
133 obj = getattr(module, objname)
134 newobj = deprecated(obj, "{0}.{1}".format(current, objname),
135 "Please use {0}.{1} instead".format(modulename, objname))
136 if renames:
137 newname = renames[position]
138 else:
139 newname = objname
140
141 setattr(sys.modules[current], newname, newobj)
142
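
The deprecated() helper above wraps a callable so that every call emits a DeprecationWarning naming the old entry point and the suggested replacement, and deprecate_import() uses it to re-export objects from one module into another behind that shim. A hedged sketch of the decorator in use, with hypothetical join_args/old_join names, assuming bitbake/lib from a poky checkout is on sys.path:

# Sketch: wrapping a helper with bb.deprecated (Python 2, bitbake/lib on sys.path).
import sys
import warnings
sys.path.insert(0, "bitbake/lib")   # assumption: run from the top of a poky checkout
import bb

def join_args(*args):               # hypothetical replacement function
    return ''.join(args)

# Keep the old name importable, but point callers at join_args() on every use.
old_join = bb.deprecated(join_args, name="old_join",
                         advice="use join_args() instead")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    result = old_join("a", "b")     # returns "ab" and records a DeprecationWarning
    print(result)
    print(caught[0].message)        # Call to deprecated function old_join: use join_args() instead.
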
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
new file mode 100644
index 0000000000..65cc851df4
--- /dev/null
+++ b/bitbake/lib/bb/build.py
@@ -0,0 +1,711 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake 'Build' implementation
5#
6# Core code for function execution and task handling in the
7# BitBake build tools.
8#
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# Based on Gentoo's portage.py.
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import sys
30import logging
31import shlex
32import glob
33import time
34import bb
35import bb.msg
36import bb.process
37from contextlib import nested
38from bb import event, utils
39
40bblogger = logging.getLogger('BitBake')
41logger = logging.getLogger('BitBake.Build')
42
43NULL = open(os.devnull, 'r+')
44
45# When we execute a Python function, we'd like certain things
46# in all namespaces, hence we add them to __builtins__.
47# If we do not do this and use the exec globals, they will
48# not be available to subfunctions.
49__builtins__['bb'] = bb
50__builtins__['os'] = os
51
52class FuncFailed(Exception):
53 def __init__(self, name = None, logfile = None):
54 self.logfile = logfile
55 self.name = name
56 if name:
57 self.msg = 'Function failed: %s' % name
58 else:
59 self.msg = "Function failed"
60
61 def __str__(self):
62 if self.logfile and os.path.exists(self.logfile):
63 msg = ("%s (log file is located at %s)" %
64 (self.msg, self.logfile))
65 else:
66 msg = self.msg
67 return msg
68
69class TaskBase(event.Event):
70 """Base class for task events"""
71
72 def __init__(self, t, logfile, d):
73 self._task = t
74 self._package = d.getVar("PF", True)
75 self.taskfile = d.getVar("FILE", True)
76 self.taskname = self._task
77 self.logfile = logfile
78 self.time = time.time()
79 event.Event.__init__(self)
80 self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
81
82 def getTask(self):
83 return self._task
84
85 def setTask(self, task):
86 self._task = task
87
88 def getDisplayName(self):
89 return bb.event.getName(self)[4:]
90
91 task = property(getTask, setTask, None, "task property")
92
93class TaskStarted(TaskBase):
94 """Task execution started"""
95 def __init__(self, t, logfile, taskflags, d):
96 super(TaskStarted, self).__init__(t, logfile, d)
97 self.taskflags = taskflags
98
99class TaskSucceeded(TaskBase):
100 """Task execution completed"""
101
102class TaskFailed(TaskBase):
103 """Task execution failed"""
104
105 def __init__(self, task, logfile, metadata, errprinted = False):
106 self.errprinted = errprinted
107 super(TaskFailed, self).__init__(task, logfile, metadata)
108
109class TaskFailedSilent(TaskBase):
110 """Task execution failed (silently)"""
111 def getDisplayName(self):
112 # Don't need to tell the user it was silent
113 return "Failed"
114
115class TaskInvalid(TaskBase):
116
117 def __init__(self, task, metadata):
118 super(TaskInvalid, self).__init__(task, None, metadata)
119 self._message = "No such task '%s'" % task
120
121
122class LogTee(object):
123 def __init__(self, logger, outfile):
124 self.outfile = outfile
125 self.logger = logger
126 self.name = self.outfile.name
127
128 def write(self, string):
129 self.logger.plain(string)
130 self.outfile.write(string)
131
132 def __enter__(self):
133 self.outfile.__enter__()
134 return self
135
136 def __exit__(self, *excinfo):
137 self.outfile.__exit__(*excinfo)
138
139 def __repr__(self):
140 return '<LogTee {0}>'.format(self.name)
141 def flush(self):
142 self.outfile.flush()
143
144def exec_func(func, d, dirs = None):
145 """Execute a BB 'function'"""
146
147 body = d.getVar(func)
148 if not body:
149 if body is None:
150 logger.warn("Function %s doesn't exist", func)
151 return
152
153 flags = d.getVarFlags(func)
154 cleandirs = flags.get('cleandirs')
155 if cleandirs:
156 for cdir in d.expand(cleandirs).split():
157 bb.utils.remove(cdir, True)
158 bb.utils.mkdirhier(cdir)
159
160 if dirs is None:
161 dirs = flags.get('dirs')
162 if dirs:
163 dirs = d.expand(dirs).split()
164
165 if dirs:
166 for adir in dirs:
167 bb.utils.mkdirhier(adir)
168 adir = dirs[-1]
169 else:
170 adir = d.getVar('B', True)
171 bb.utils.mkdirhier(adir)
172
173 ispython = flags.get('python')
174
175 lockflag = flags.get('lockfiles')
176 if lockflag:
177 lockfiles = [f for f in d.expand(lockflag).split()]
178 else:
179 lockfiles = None
180
181 tempdir = d.getVar('T', True)
182
183 # The 'or func' fallback allows items to be executed outside of the
184 # normal task set, such as buildhistory
185 task = d.getVar('BB_RUNTASK', True) or func
186 if task == func:
187 taskfunc = task
188 else:
189 taskfunc = "%s.%s" % (task, func)
190
191 runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
192 runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
193 runfile = os.path.join(tempdir, runfn)
194 bb.utils.mkdirhier(os.path.dirname(runfile))
195
196 # Set up the courtesy link to the runfn, only for tasks.
197 # We create the link just before the run script is created;
198 # if we created it afterwards and the run script failed, the
199 # link would never be created, as an exception would be raised.
200 if task == func:
201 runlink = os.path.join(tempdir, 'run.{0}'.format(task))
202 if runlink:
203 bb.utils.remove(runlink)
204
205 try:
206 os.symlink(runfn, runlink)
207 except OSError:
208 pass
209
210 with bb.utils.fileslocked(lockfiles):
211 if ispython:
212 exec_func_python(func, d, runfile, cwd=adir)
213 else:
214 exec_func_shell(func, d, runfile, cwd=adir)
215
216_functionfmt = """
217def {function}(d):
218{body}
219
220{function}(d)
221"""
222logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
223def exec_func_python(func, d, runfile, cwd=None):
224 """Execute a python BB 'function'"""
225
226 bbfile = d.getVar('FILE', True)
227 code = _functionfmt.format(function=func, body=d.getVar(func, True))
228 bb.utils.mkdirhier(os.path.dirname(runfile))
229 with open(runfile, 'w') as script:
230 bb.data.emit_func_python(func, script, d)
231
232 if cwd:
233 try:
234 olddir = os.getcwd()
235 except OSError:
236 olddir = None
237 os.chdir(cwd)
238
239 bb.debug(2, "Executing python function %s" % func)
240
241 try:
242 comp = utils.better_compile(code, func, bbfile)
243 utils.better_exec(comp, {"d": d}, code, bbfile)
244 except (bb.parse.SkipRecipe, bb.build.FuncFailed):
245 raise
246 except:
247 raise FuncFailed(func, None)
248 finally:
249 bb.debug(2, "Python function %s finished" % func)
250
251 if cwd and olddir:
252 try:
253 os.chdir(olddir)
254 except OSError:
255 pass
256
257def shell_trap_code():
258 return '''#!/bin/sh\n
259# Emit a useful diagnostic if something fails:
260bb_exit_handler() {
261 ret=$?
262 case $ret in
263 0) ;;
264 *) case $BASH_VERSION in
265 "") echo "WARNING: exit code $ret from a shell command.";;
266 *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
267 \"$BASH_COMMAND\"";;
268 esac
269 exit $ret
270 esac
271}
272trap 'bb_exit_handler' 0
273set -e
274'''
275
276def exec_func_shell(func, d, runfile, cwd=None):
277 """Execute a shell function from the metadata
278
279 Note on directory behavior. The 'dirs' varflag should contain a list
280 of the directories you need created prior to execution. The last
281 item in the list is where we will chdir/cd to.
282 """
283
284 # Don't let the emitted shell script override PWD
285 d.delVarFlag('PWD', 'export')
286
287 with open(runfile, 'w') as script:
288 script.write(shell_trap_code())
289
290 bb.data.emit_func(func, script, d)
291
292 if bb.msg.loggerVerboseLogs:
293 script.write("set -x\n")
294 if cwd:
295 script.write("cd '%s'\n" % cwd)
296 script.write("%s\n" % func)
297 script.write('''
298# cleanup
299ret=$?
300trap '' 0
301exit $?
302''')
303
304 os.chmod(runfile, 0775)
305
306 cmd = runfile
307 if d.getVarFlag(func, 'fakeroot'):
308 fakerootcmd = d.getVar('FAKEROOT', True)
309 if fakerootcmd:
310 cmd = [fakerootcmd, runfile]
311
312 if bb.msg.loggerDefaultVerbose:
313 logfile = LogTee(logger, sys.stdout)
314 else:
315 logfile = sys.stdout
316
317 bb.debug(2, "Executing shell function %s" % func)
318
319 try:
320 with open(os.devnull, 'r+') as stdin:
321 bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
322 except bb.process.CmdError:
323 logfn = d.getVar('BB_LOGFILE', True)
324 raise FuncFailed(func, logfn)
325
326 bb.debug(2, "Shell function %s finished" % func)
327
328def _task_data(fn, task, d):
329 localdata = bb.data.createCopy(d)
330 localdata.setVar('BB_FILENAME', fn)
331 localdata.setVar('BB_CURRENTTASK', task[3:])
332 localdata.setVar('OVERRIDES', 'task-%s:%s' %
333 (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
334 localdata.finalize()
335 bb.data.expandKeys(localdata)
336 return localdata
337
338def _exec_task(fn, task, d, quieterr):
339 """Execute a BB 'task'
340
341 Execution of a task involves a bit more setup than executing a function,
342 running it with its own local metadata, and with some useful variables set.
343 """
344 if not d.getVarFlag(task, 'task'):
345 event.fire(TaskInvalid(task, d), d)
346 logger.error("No such task: %s" % task)
347 return 1
348
349 logger.debug(1, "Executing task %s", task)
350
351 localdata = _task_data(fn, task, d)
352 tempdir = localdata.getVar('T', True)
353 if not tempdir:
354 bb.fatal("T variable not set, unable to build")
355
356 # Change nice level if we're asked to
357 nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
358 if nice:
359 curnice = os.nice(0)
360 nice = int(nice) - curnice
361 newnice = os.nice(nice)
362 logger.debug(1, "Renice to %s " % newnice)
363
364 bb.utils.mkdirhier(tempdir)
365
366 # Determine the logfile to generate
367 logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
368 logbase = logfmt.format(task=task, pid=os.getpid())
369
370 # Document the order of the tasks...
371 logorder = os.path.join(tempdir, 'log.task_order')
372 try:
373 with open(logorder, 'a') as logorderfile:
374 logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
375 except OSError:
376 logger.exception("Opening log file '%s'", logorder)
377 pass
378
379 # Setup the courtesy link to the logfn
380 loglink = os.path.join(tempdir, 'log.{0}'.format(task))
381 logfn = os.path.join(tempdir, logbase)
382 if loglink:
383 bb.utils.remove(loglink)
384
385 try:
386 os.symlink(logbase, loglink)
387 except OSError:
388 pass
389
390 prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
391 postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
392
393 class ErrorCheckHandler(logging.Handler):
394 def __init__(self):
395 self.triggered = False
396 logging.Handler.__init__(self, logging.ERROR)
397 def emit(self, record):
398 self.triggered = True
399
400 # Handle logfiles
401 si = open('/dev/null', 'r')
402 try:
403 bb.utils.mkdirhier(os.path.dirname(logfn))
404 logfile = open(logfn, 'w')
405 except OSError:
406 logger.exception("Opening log file '%s'", logfn)
407 pass
408
409 # Dup the existing fds so we don't lose them
410 osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
411 oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
412 ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
413
414 # Replace those fds with our own
415 os.dup2(si.fileno(), osi[1])
416 os.dup2(logfile.fileno(), oso[1])
417 os.dup2(logfile.fileno(), ose[1])
418
419 # Ensure Python logging goes to the logfile
420 handler = logging.StreamHandler(logfile)
421 handler.setFormatter(logformatter)
422 # Always enable full debug output into task logfiles
423 handler.setLevel(logging.DEBUG - 2)
424 bblogger.addHandler(handler)
425
426 errchk = ErrorCheckHandler()
427 bblogger.addHandler(errchk)
428
429 localdata.setVar('BB_LOGFILE', logfn)
430 localdata.setVar('BB_RUNTASK', task)
431
432 flags = localdata.getVarFlags(task)
433
434 event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
435 try:
436 for func in (prefuncs or '').split():
437 exec_func(func, localdata)
438 exec_func(task, localdata)
439 for func in (postfuncs or '').split():
440 exec_func(func, localdata)
441 except FuncFailed as exc:
442 if quieterr:
443 event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
444 else:
445 errprinted = errchk.triggered
446 logger.error(str(exc))
447 event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
448 return 1
449 finally:
450 sys.stdout.flush()
451 sys.stderr.flush()
452
453 bblogger.removeHandler(handler)
454
455 # Restore the backup fds
456 os.dup2(osi[0], osi[1])
457 os.dup2(oso[0], oso[1])
458 os.dup2(ose[0], ose[1])
459
460 # Close the backup fds
461 os.close(osi[0])
462 os.close(oso[0])
463 os.close(ose[0])
464 si.close()
465
466 logfile.close()
467 if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
468 logger.debug(2, "Zero size logfn %s, removing", logfn)
469 bb.utils.remove(logfn)
470 bb.utils.remove(loglink)
471 event.fire(TaskSucceeded(task, logfn, localdata), localdata)
472
473 if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
474 make_stamp(task, localdata)
475
476 return 0
477
478def exec_task(fn, task, d, profile = False):
479 try:
480 quieterr = False
481 if d.getVarFlag(task, "quieterrors") is not None:
482 quieterr = True
483
484 if profile:
485 profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
486 try:
487 import cProfile as profile
488 except:
489 import profile
490 prof = profile.Profile()
491 ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
492 prof.dump_stats(profname)
493 bb.utils.process_profilelog(profname)
494
495 return ret
496 else:
497 return _exec_task(fn, task, d, quieterr)
498
499 except Exception:
500 from traceback import format_exc
501 if not quieterr:
502 logger.error("Build of %s failed" % (task))
503 logger.error(format_exc())
504 failedevent = TaskFailed(task, None, d, True)
505 event.fire(failedevent, d)
506 return 1
507
508def stamp_internal(taskname, d, file_name, baseonly=False):
509 """
510 Internal stamp helper function
511 Makes sure the stamp directory exists
512 Returns the stamp path+filename
513
514 In the bitbake core, d can be a CacheData and file_name will be set.
515 When called in task context, d will be a data store, file_name will not be set
516 """
517 taskflagname = taskname
518 if taskname.endswith("_setscene") and taskname != "do_setscene":
519 taskflagname = taskname.replace("_setscene", "")
520
521 if file_name:
522 stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
523 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
524 else:
525 stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
526 file_name = d.getVar('BB_FILENAME', True)
527 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
528
529 if baseonly:
530 return stamp
531
532 if not stamp:
533 return
534
535 stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
536
537 stampdir = os.path.dirname(stamp)
538 if bb.parse.cached_mtime_noerror(stampdir) == 0:
539 bb.utils.mkdirhier(stampdir)
540
541 return stamp
542
543def stamp_cleanmask_internal(taskname, d, file_name):
544 """
545 Internal stamp helper function to generate stamp cleaning mask
546 Returns the stamp path+filename
547
548 In the bitbake core, d can be a CacheData and file_name will be set.
549 When called in task context, d will be a data store, file_name will not be set
550 """
551 taskflagname = taskname
552 if taskname.endswith("_setscene") and taskname != "do_setscene":
553 taskflagname = taskname.replace("_setscene", "")
554
555 if file_name:
556 stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
557 extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
558 else:
559 stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
560 file_name = d.getVar('BB_FILENAME', True)
561 extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
562
563 if not stamp:
564 return []
565
566 cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
567
568 return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
569
570def make_stamp(task, d, file_name = None):
571 """
572 Creates/updates a stamp for a given task
573 (d can be a data dict or dataCache)
574 """
575 cleanmask = stamp_cleanmask_internal(task, d, file_name)
576 for mask in cleanmask:
577 for name in glob.glob(mask):
578 # Preserve sigdata files in the stamps directory
579 if "sigdata" in name:
580 continue
581 # Preserve taint files in the stamps directory
582 if name.endswith('.taint'):
583 continue
584 os.unlink(name)
585
586 stamp = stamp_internal(task, d, file_name)
587 # Remove the file and recreate to force timestamp
588 # change on broken NFS filesystems
589 if stamp:
590 bb.utils.remove(stamp)
591 open(stamp, "w").close()
592
593 # If we're in task context, write out a signature file for each task
594 # as it completes
595 if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
596 stampbase = stamp_internal(task, d, None, True)
597 file_name = d.getVar('BB_FILENAME', True)
598 bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
599
600def del_stamp(task, d, file_name = None):
601 """
602 Removes a stamp for a given task
603 (d can be a data dict or dataCache)
604 """
605 stamp = stamp_internal(task, d, file_name)
606 bb.utils.remove(stamp)
607
608def write_taint(task, d, file_name = None):
609 """
610 Creates a "taint" file which will force the specified task and its
611 dependents to be re-run the next time by influencing the value of its
612 taskhash.
613 (d can be a data dict or dataCache)
614 """
615 import uuid
616 if file_name:
617 taintfn = d.stamp[file_name] + '.' + task + '.taint'
618 else:
619 taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
620 bb.utils.mkdirhier(os.path.dirname(taintfn))
621 # The specific content of the taint file is not really important,
622 # we just need it to be random, so a random UUID is used
623 with open(taintfn, 'w') as taintf:
624 taintf.write(str(uuid.uuid4()))
625
626def stampfile(taskname, d, file_name = None):
627 """
628 Return the stamp for a given task
629 (d can be a data dict or dataCache)
630 """
631 return stamp_internal(taskname, d, file_name)
632
633def add_tasks(tasklist, deltasklist, d):
634 task_deps = d.getVar('_task_deps')
635 if not task_deps:
636 task_deps = {}
637 if not 'tasks' in task_deps:
638 task_deps['tasks'] = []
639 if not 'parents' in task_deps:
640 task_deps['parents'] = {}
641
642 for task in tasklist:
643 task = d.expand(task)
644
645 if task in deltasklist:
646 continue
647
648 d.setVarFlag(task, 'task', 1)
649
650 if not task in task_deps['tasks']:
651 task_deps['tasks'].append(task)
652
653 flags = d.getVarFlags(task)
654 def getTask(name):
655 if not name in task_deps:
656 task_deps[name] = {}
657 if name in flags:
658 deptask = d.expand(flags[name])
659 task_deps[name][task] = deptask
660 getTask('depends')
661 getTask('rdepends')
662 getTask('deptask')
663 getTask('rdeptask')
664 getTask('recrdeptask')
665 getTask('recideptask')
666 getTask('nostamp')
667 getTask('fakeroot')
668 getTask('noexec')
669 getTask('umask')
670 task_deps['parents'][task] = []
671 if 'deps' in flags:
672 for dep in flags['deps']:
673 dep = d.expand(dep)
674 task_deps['parents'][task].append(dep)
675
676 # don't assume holding a reference
677 d.setVar('_task_deps', task_deps)
678
679def addtask(task, before, after, d):
680 if task[:3] != "do_":
681 task = "do_" + task
682
683 d.setVarFlag(task, "task", 1)
684 bbtasks = d.getVar('__BBTASKS') or []
685 if not task in bbtasks:
686 bbtasks.append(task)
687 d.setVar('__BBTASKS', bbtasks)
688
689 existing = d.getVarFlag(task, "deps") or []
690 if after is not None:
691 # set up deps for function
692 for entry in after.split():
693 if entry not in existing:
694 existing.append(entry)
695 d.setVarFlag(task, "deps", existing)
696 if before is not None:
697 # set up things that depend on this func
698 for entry in before.split():
699 existing = d.getVarFlag(entry, "deps") or []
700 if task not in existing:
701 d.setVarFlag(entry, "deps", [task] + existing)
702
703def deltask(task, d):
704 if task[:3] != "do_":
705 task = "do_" + task
706
707 bbtasks = d.getVar('__BBDELTASKS') or []
708 if not task in bbtasks:
709 bbtasks.append(task)
710 d.setVar('__BBDELTASKS', bbtasks)
711
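
addtask() above normalises the task name to a do_ prefix, records it in the __BBTASKS list, and encodes ordering through the 'deps' varflag: an 'after' task is appended to the new task's own deps, while a 'before' task gets the new task prepended to its deps. deltask() only records the name in __BBDELTASKS for later filtering by add_tasks(). A small sketch of that bookkeeping against an empty datastore (assumes bitbake/lib from a poky checkout on sys.path; bb.data.init() is used here only to obtain a bare DataSmart store):

# Sketch: how addtask()/deltask() record task ordering (Python 2, bitbake/lib on sys.path).
import sys
sys.path.insert(0, "bitbake/lib")              # assumption: run from the top of a poky checkout
import bb.data
from bb.build import addtask, deltask

d = bb.data.init()                             # empty datastore

addtask("fetch", None, None, d)                # registered as do_fetch
addtask("unpack", None, "do_fetch", d)         # do_unpack runs after do_fetch
addtask("patch", "do_configure", "do_unpack", d)

print(d.getVar("__BBTASKS"))                   # ['do_fetch', 'do_unpack', 'do_patch']
print(d.getVarFlag("do_unpack", "deps"))       # ['do_fetch']   (the 'after' side)
print(d.getVarFlag("do_configure", "deps"))    # ['do_patch']   (the 'before' side)

deltask("patch", d)                            # only marks do_patch for removal
print(d.getVar("__BBDELTASKS"))                # ['do_patch']
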
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
new file mode 100644
index 0000000000..a1dde96425
--- /dev/null
+++ b/bitbake/lib/bb/cache.py
@@ -0,0 +1,837 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Cache implementation
5#
6# Caching of bitbake variables before task execution
7
8# Copyright (C) 2006 Richard Purdie
9# Copyright (C) 2012 Intel Corporation
10
11# but small sections based on code from bin/bitbake:
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
15# Copyright (C) 2005 Holger Hans Peter Freyther
16# Copyright (C) 2005 ROAD GmbH
17#
18# This program is free software; you can redistribute it and/or modify
19# it under the terms of the GNU General Public License version 2 as
20# published by the Free Software Foundation.
21#
22# This program is distributed in the hope that it will be useful,
23# but WITHOUT ANY WARRANTY; without even the implied warranty of
24# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
25# GNU General Public License for more details.
26#
27# You should have received a copy of the GNU General Public License along
28# with this program; if not, write to the Free Software Foundation, Inc.,
29# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
30
31
32import os
33import logging
34from collections import defaultdict
35import bb.utils
36
37logger = logging.getLogger("BitBake.Cache")
38
39try:
40 import cPickle as pickle
41except ImportError:
42 import pickle
43 logger.info("Importing cPickle failed. "
44 "Falling back to a very slow implementation.")
45
46__cache_version__ = "148"
47
48def getCacheFile(path, filename, data_hash):
49 return os.path.join(path, filename + "." + data_hash)
50
51# RecipeInfoCommon defines common data-retrieval methods
52# from metadata for the caches. CoreRecipeInfo, as well as any other
53# extra RecipeInfo class, needs to inherit from this class
54class RecipeInfoCommon(object):
55
56 @classmethod
57 def listvar(cls, var, metadata):
58 return cls.getvar(var, metadata).split()
59
60 @classmethod
61 def intvar(cls, var, metadata):
62 return int(cls.getvar(var, metadata) or 0)
63
64 @classmethod
65 def depvar(cls, var, metadata):
66 return bb.utils.explode_deps(cls.getvar(var, metadata))
67
68 @classmethod
69 def pkgvar(cls, var, packages, metadata):
70 return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
71 for pkg in packages)
72
73 @classmethod
74 def taskvar(cls, var, tasks, metadata):
75 return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
76 for task in tasks)
77
78 @classmethod
79 def flaglist(cls, flag, varlist, metadata, squash=False):
80 out_dict = dict((var, metadata.getVarFlag(var, flag, True))
81 for var in varlist)
82 if squash:
83 return dict((k,v) for (k,v) in out_dict.iteritems() if v)
84 else:
85 return out_dict
86
87 @classmethod
88 def getvar(cls, var, metadata):
89 return metadata.getVar(var, True) or ''
90
91
92class CoreRecipeInfo(RecipeInfoCommon):
93 __slots__ = ()
94
95 cachefile = "bb_cache.dat"
96
97 def __init__(self, filename, metadata):
98 self.file_depends = metadata.getVar('__depends', False)
99 self.timestamp = bb.parse.cached_mtime(filename)
100 self.variants = self.listvar('__VARIANTS', metadata) + ['']
101 self.appends = self.listvar('__BBAPPEND', metadata)
102 self.nocache = self.getvar('__BB_DONT_CACHE', metadata)
103
104 self.skipreason = self.getvar('__SKIPPED', metadata)
105 if self.skipreason:
106 self.pn = self.getvar('PN', metadata) or bb.parse.BBHandler.vars_from_file(filename,metadata)[0]
107 self.skipped = True
108 self.provides = self.depvar('PROVIDES', metadata)
109 self.rprovides = self.depvar('RPROVIDES', metadata)
110 return
111
112 self.tasks = metadata.getVar('__BBTASKS', False)
113
114 self.pn = self.getvar('PN', metadata)
115 self.packages = self.listvar('PACKAGES', metadata)
116 if not self.pn in self.packages:
117 self.packages.append(self.pn)
118
119 self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
120 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
121
122 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
123
124 self.skipped = False
125 self.pe = self.getvar('PE', metadata)
126 self.pv = self.getvar('PV', metadata)
127 self.pr = self.getvar('PR', metadata)
128 self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
129 self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
130 self.stamp = self.getvar('STAMP', metadata)
131 self.stampclean = self.getvar('STAMPCLEAN', metadata)
132 self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
133 self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
134 self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
135 self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
136 self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
137 self.depends = self.depvar('DEPENDS', metadata)
138 self.provides = self.depvar('PROVIDES', metadata)
139 self.rdepends = self.depvar('RDEPENDS', metadata)
140 self.rprovides = self.depvar('RPROVIDES', metadata)
141 self.rrecommends = self.depvar('RRECOMMENDS', metadata)
142 self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
143 self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
144 self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
145 self.inherits = self.getvar('__inherit_cache', metadata)
146 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
147 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
148 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
149
150 @classmethod
151 def init_cacheData(cls, cachedata):
152 # CacheData in Core RecipeInfo Class
153 cachedata.task_deps = {}
154 cachedata.pkg_fn = {}
155 cachedata.pkg_pn = defaultdict(list)
156 cachedata.pkg_pepvpr = {}
157 cachedata.pkg_dp = {}
158
159 cachedata.stamp = {}
160 cachedata.stampclean = {}
161 cachedata.stamp_base = {}
162 cachedata.stamp_base_clean = {}
163 cachedata.stamp_extrainfo = {}
164 cachedata.file_checksums = {}
165 cachedata.fn_provides = {}
166 cachedata.pn_provides = defaultdict(list)
167 cachedata.all_depends = []
168
169 cachedata.deps = defaultdict(list)
170 cachedata.packages = defaultdict(list)
171 cachedata.providers = defaultdict(list)
172 cachedata.rproviders = defaultdict(list)
173 cachedata.packages_dynamic = defaultdict(list)
174
175 cachedata.rundeps = defaultdict(lambda: defaultdict(list))
176 cachedata.runrecs = defaultdict(lambda: defaultdict(list))
177 cachedata.possible_world = []
178 cachedata.universe_target = []
179 cachedata.hashfn = {}
180
181 cachedata.basetaskhash = {}
182 cachedata.inherits = {}
183 cachedata.fakerootenv = {}
184 cachedata.fakerootnoenv = {}
185 cachedata.fakerootdirs = {}
186
187 def add_cacheData(self, cachedata, fn):
188 cachedata.task_deps[fn] = self.task_deps
189 cachedata.pkg_fn[fn] = self.pn
190 cachedata.pkg_pn[self.pn].append(fn)
191 cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
192 cachedata.pkg_dp[fn] = self.defaultpref
193 cachedata.stamp[fn] = self.stamp
194 cachedata.stampclean[fn] = self.stampclean
195 cachedata.stamp_base[fn] = self.stamp_base
196 cachedata.stamp_base_clean[fn] = self.stamp_base_clean
197 cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
198 cachedata.file_checksums[fn] = self.file_checksums
199
200 provides = [self.pn]
201 for provide in self.provides:
202 if provide not in provides:
203 provides.append(provide)
204 cachedata.fn_provides[fn] = provides
205
206 for provide in provides:
207 cachedata.providers[provide].append(fn)
208 if provide not in cachedata.pn_provides[self.pn]:
209 cachedata.pn_provides[self.pn].append(provide)
210
211 for dep in self.depends:
212 if dep not in cachedata.deps[fn]:
213 cachedata.deps[fn].append(dep)
214 if dep not in cachedata.all_depends:
215 cachedata.all_depends.append(dep)
216
217 rprovides = self.rprovides
218 for package in self.packages:
219 cachedata.packages[package].append(fn)
220 rprovides += self.rprovides_pkg[package]
221
222 for rprovide in rprovides:
223 cachedata.rproviders[rprovide].append(fn)
224
225 for package in self.packages_dynamic:
226 cachedata.packages_dynamic[package].append(fn)
227
228 # Build hash of runtime depends and recommends
229 for package in self.packages + [self.pn]:
230 cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
231 cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
232
233 # Collect files we may need for possible world-dep
234 # calculations
235 if self.not_world:
236 logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
237 else:
238 cachedata.possible_world.append(fn)
239
240 # create a collection of all targets for sanity checking
241 # tasks, such as upstream versions, license, and tools for
242 # task and image creation.
243 cachedata.universe_target.append(self.pn)
244
245 cachedata.hashfn[fn] = self.hashfilename
246 for task, taskhash in self.basetaskhashes.iteritems():
247 identifier = '%s.%s' % (fn, task)
248 cachedata.basetaskhash[identifier] = taskhash
249
250 cachedata.inherits[fn] = self.inherits
251 cachedata.fakerootenv[fn] = self.fakerootenv
252 cachedata.fakerootnoenv[fn] = self.fakerootnoenv
253 cachedata.fakerootdirs[fn] = self.fakerootdirs
254
255
256
257class Cache(object):
258 """
259 BitBake Cache implementation
260 """
261
262 def __init__(self, data, data_hash, caches_array):
263 # Pass caches_array information into Cache Constructor
264 # It will be used later for deciding whether we
265 # need extra cache file dump/load support
266 self.caches_array = caches_array
267 self.cachedir = data.getVar("CACHE", True)
268 self.clean = set()
269 self.checked = set()
270 self.depends_cache = {}
271 self.data = None
272 self.data_fn = None
273 self.cacheclean = True
274 self.data_hash = data_hash
275
276 if self.cachedir in [None, '']:
277 self.has_cache = False
278 logger.info("Not using a cache. "
279 "Set CACHE = <directory> to enable.")
280 return
281
282 self.has_cache = True
283 self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
284
285 logger.debug(1, "Using cache in '%s'", self.cachedir)
286 bb.utils.mkdirhier(self.cachedir)
287
288 cache_ok = True
289 if self.caches_array:
290 for cache_class in self.caches_array:
291 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
292 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
293 cache_ok = cache_ok and os.path.exists(cachefile)
294 cache_class.init_cacheData(self)
295 if cache_ok:
296 self.load_cachefile()
297 elif os.path.isfile(self.cachefile):
298 logger.info("Out of date cache found, rebuilding...")
299
300 def load_cachefile(self):
301 # First, use the core cache file information for
302 # validity checking
303 with open(self.cachefile, "rb") as cachefile:
304 pickled = pickle.Unpickler(cachefile)
305 try:
306 cache_ver = pickled.load()
307 bitbake_ver = pickled.load()
308 except Exception:
309 logger.info('Invalid cache, rebuilding...')
310 return
311
312 if cache_ver != __cache_version__:
313 logger.info('Cache version mismatch, rebuilding...')
314 return
315 elif bitbake_ver != bb.__version__:
316 logger.info('Bitbake version mismatch, rebuilding...')
317 return
318
319
320 cachesize = 0
321 previous_progress = 0
322 previous_percent = 0
323
324 # Calculate the correct cachesize of all those cache files
325 for cache_class in self.caches_array:
326 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
327 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
328 with open(cachefile, "rb") as cachefile:
329 cachesize += os.fstat(cachefile.fileno()).st_size
330
331 bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
332
333 for cache_class in self.caches_array:
334 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
335 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
336 with open(cachefile, "rb") as cachefile:
337 pickled = pickle.Unpickler(cachefile)
338 while cachefile:
339 try:
340 key = pickled.load()
341 value = pickled.load()
342 except Exception:
343 break
344 if self.depends_cache.has_key(key):
345 self.depends_cache[key].append(value)
346 else:
347 self.depends_cache[key] = [value]
348 # only fire events on even percentage boundaries
349 current_progress = cachefile.tell() + previous_progress
350 current_percent = 100 * current_progress / cachesize
351 if current_percent > previous_percent:
352 previous_percent = current_percent
353 bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
354 self.data)
355
356 previous_progress += current_progress
357
358 # Note: the depends cache count corresponds to the number of parsed files;
359 # the same file may have several caches, but it is still regarded as one item in the cache
360 bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
361 len(self.depends_cache)),
362 self.data)
363
364
365 @staticmethod
366 def virtualfn2realfn(virtualfn):
367 """
368 Convert a virtual file name to a real one + the associated subclass keyword
369 """
370
371 fn = virtualfn
372 cls = ""
373 if virtualfn.startswith('virtual:'):
374 elems = virtualfn.split(':')
375 cls = ":".join(elems[1:-1])
376 fn = elems[-1]
377 return (fn, cls)
378
379 @staticmethod
380 def realfn2virtual(realfn, cls):
381 """
382 Convert a real filename + the associated subclass keyword to a virtual filename
383 """
384 if cls == "":
385 return realfn
386 return "virtual:" + cls + ":" + realfn
387
388 @classmethod
389 def loadDataFull(cls, virtualfn, appends, cfgData):
390 """
391 Return a complete set of data for fn.
392 To do this, we need to parse the file.
393 """
394
395 (fn, virtual) = cls.virtualfn2realfn(virtualfn)
396
397 logger.debug(1, "Parsing %s (full)", fn)
398
399 cfgData.setVar("__ONLYFINALISE", virtual or "default")
400 bb_data = cls.load_bbfile(fn, appends, cfgData)
401 return bb_data[virtual]
402
403 @classmethod
404 def parse(cls, filename, appends, configdata, caches_array):
405 """Parse the specified filename, returning the recipe information"""
406 infos = []
407 datastores = cls.load_bbfile(filename, appends, configdata)
408 depends = []
409 for variant, data in sorted(datastores.iteritems(),
410 key=lambda i: i[0],
411 reverse=True):
412 virtualfn = cls.realfn2virtual(filename, variant)
413 depends = depends + (data.getVar("__depends", False) or [])
414 if depends and not variant:
415 data.setVar("__depends", depends)
416
417 info_array = []
418 for cache_class in caches_array:
419 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
420 info = cache_class(filename, data)
421 info_array.append(info)
422 infos.append((virtualfn, info_array))
423
424 return infos
425
426 def load(self, filename, appends, configdata):
427 """Obtain the recipe information for the specified filename,
428 using cached values if available, otherwise parsing.
429
430 Note that if it does parse to obtain the info, it will not
431 automatically add the information to the cache or to your
432 CacheData. Use the add or add_info method to do so after
433 running this, or use loadData instead."""
434 cached = self.cacheValid(filename, appends)
435 if cached:
436 infos = []
437 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
438 info_array = self.depends_cache[filename]
439 for variant in info_array[0].variants:
440 virtualfn = self.realfn2virtual(filename, variant)
441 infos.append((virtualfn, self.depends_cache[virtualfn]))
442 else:
443 logger.debug(1, "Parsing %s", filename)
444 return self.parse(filename, appends, configdata, self.caches_array)
445
446 return cached, infos
447
448 def loadData(self, fn, appends, cfgData, cacheData):
449 """Load the recipe info for the specified filename,
450 parsing and adding to the cache if necessary, and adding
451 the recipe information to the supplied CacheData instance."""
452 skipped, virtuals = 0, 0
453
454 cached, infos = self.load(fn, appends, cfgData)
455 for virtualfn, info_array in infos:
456 if info_array[0].skipped:
457 logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
458 skipped += 1
459 else:
460 self.add_info(virtualfn, info_array, cacheData, not cached)
461 virtuals += 1
462
463 return cached, skipped, virtuals
464
465 def cacheValid(self, fn, appends):
466 """
467 Is the cache valid for fn?
468 Fast version, no timestamps checked.
469 """
470 if fn not in self.checked:
471 self.cacheValidUpdate(fn, appends)
472
473 # Is cache enabled?
474 if not self.has_cache:
475 return False
476 if fn in self.clean:
477 return True
478 return False
479
480 def cacheValidUpdate(self, fn, appends):
481 """
482 Is the cache valid for fn?
483 Make thorough (slower) checks including timestamps.
484 """
485 # Is cache enabled?
486 if not self.has_cache:
487 return False
488
489 self.checked.add(fn)
490
491 # File isn't in depends_cache
492 if not fn in self.depends_cache:
493 logger.debug(2, "Cache: %s is not cached", fn)
494 return False
495
496 mtime = bb.parse.cached_mtime_noerror(fn)
497
498 # Check file still exists
499 if mtime == 0:
500 logger.debug(2, "Cache: %s no longer exists", fn)
501 self.remove(fn)
502 return False
503
504 info_array = self.depends_cache[fn]
505 # Check the file's timestamp
506 if mtime != info_array[0].timestamp:
507 logger.debug(2, "Cache: %s changed", fn)
508 self.remove(fn)
509 return False
510
511 # Check dependencies are still valid
512 depends = info_array[0].file_depends
513 if depends:
514 for f, old_mtime in depends:
515 fmtime = bb.parse.cached_mtime_noerror(f)
516 # Check if file still exists
517 if old_mtime != 0 and fmtime == 0:
518 logger.debug(2, "Cache: %s's dependency %s was removed",
519 fn, f)
520 self.remove(fn)
521 return False
522
523 if (fmtime != old_mtime):
524 logger.debug(2, "Cache: %s's dependency %s changed",
525 fn, f)
526 self.remove(fn)
527 return False
528
529 if hasattr(info_array[0], 'file_checksums'):
530 for _, fl in info_array[0].file_checksums.items():
531 for f in fl.split():
532 if "*" in f:
533 continue
534 f, exist = f.split(":")
535 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
536 logger.debug(2, "Cache: %s's file checksum list file %s changed",
537 fn, f)
538 self.remove(fn)
539 return False
540
541 if appends != info_array[0].appends:
542 logger.debug(2, "Cache: appends for %s changed", fn)
543 logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
544 self.remove(fn)
545 return False
546
547 invalid = False
548 for cls in info_array[0].variants:
549 virtualfn = self.realfn2virtual(fn, cls)
550 self.clean.add(virtualfn)
551 if virtualfn not in self.depends_cache:
552 logger.debug(2, "Cache: %s is not cached", virtualfn)
553 invalid = True
554
555 # If any one of the variants is not present, mark as invalid for all
556 if invalid:
557 for cls in info_array[0].variants:
558 virtualfn = self.realfn2virtual(fn, cls)
559 if virtualfn in self.clean:
560 logger.debug(2, "Cache: Removing %s from cache", virtualfn)
561 self.clean.remove(virtualfn)
562 if fn in self.clean:
563 logger.debug(2, "Cache: Marking %s as not clean", fn)
564 self.clean.remove(fn)
565 return False
566
567 self.clean.add(fn)
568 return True
569
570 def remove(self, fn):
571 """
572 Remove a fn from the cache
573 Called from the parser in error cases
574 """
575 if fn in self.depends_cache:
576 logger.debug(1, "Removing %s from cache", fn)
577 del self.depends_cache[fn]
578 if fn in self.clean:
579 logger.debug(1, "Marking %s as unclean", fn)
580 self.clean.remove(fn)
581
582 def sync(self):
583 """
584 Save the cache
585 Called from the parser when complete (or exiting)
586 """
587
588 if not self.has_cache:
589 return
590
591 if self.cacheclean:
592 logger.debug(2, "Cache is clean, not saving.")
593 return
594
595 file_dict = {}
596 pickler_dict = {}
597 for cache_class in self.caches_array:
598 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
599 cache_class_name = cache_class.__name__
600 cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
601 file_dict[cache_class_name] = open(cachefile, "wb")
602 pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)
603
604 pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
605 pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
606
607 try:
608 for key, info_array in self.depends_cache.iteritems():
609 for info in info_array:
610 if isinstance(info, RecipeInfoCommon):
611 cache_class_name = info.__class__.__name__
612 pickler_dict[cache_class_name].dump(key)
613 pickler_dict[cache_class_name].dump(info)
614 finally:
615 for cache_class in self.caches_array:
616 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
617 cache_class_name = cache_class.__name__
618 file_dict[cache_class_name].close()
619
620 del self.depends_cache
621
622 @staticmethod
623 def mtime(cachefile):
624 return bb.parse.cached_mtime_noerror(cachefile)
625
626 def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
627 if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
628 cacheData.add_from_recipeinfo(filename, info_array)
629
630 if watcher:
631 watcher(info_array[0].file_depends)
632
633 if not self.has_cache:
634 return
635
636 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
637 if parsed:
638 self.cacheclean = False
639 self.depends_cache[filename] = info_array
640
641 def add(self, file_name, data, cacheData, parsed=None):
642 """
643 Save data we need into the cache
644 """
645
646 realfn = self.virtualfn2realfn(file_name)[0]
647
648 info_array = []
649 for cache_class in self.caches_array:
650 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
651 info_array.append(cache_class(realfn, data))
652 self.add_info(file_name, info_array, cacheData, parsed)
653
654 @staticmethod
655 def load_bbfile(bbfile, appends, config):
656 """
657 Load and parse one .bb build file
658        Return the parsed data: a dict mapping variant names to datastores
659 """
660 chdir_back = False
661
662 from bb import data, parse
663
664 # expand tmpdir to include this topdir
665 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
666 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
667 oldpath = os.path.abspath(os.getcwd())
668 parse.cached_mtime_noerror(bbfile_loc)
669 bb_data = data.init_db(config)
670 # The ConfHandler first looks if there is a TOPDIR and if not
671 # then it would call getcwd().
672 # Previously, we chdir()ed to bbfile_loc, called the handler
673 # and finally chdir()ed back, a couple of thousand times. We now
674 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
675 if not data.getVar('TOPDIR', bb_data):
676 chdir_back = True
677 data.setVar('TOPDIR', bbfile_loc, bb_data)
678 try:
679 if appends:
680 data.setVar('__BBAPPEND', " ".join(appends), bb_data)
681 bb_data = parse.handle(bbfile, bb_data)
682 if chdir_back:
683 os.chdir(oldpath)
684 return bb_data
685 except:
686 if chdir_back:
687 os.chdir(oldpath)
688 raise
689
690
691def init(cooker):
692 """
693 The Objective: Cache the minimum amount of data possible yet get to the
694 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
695
696 To do this, we intercept getVar calls and only cache the variables we see
697 being accessed. We rely on the cache getVar calls being made for all
698 variables bitbake might need to use to reach this stage. For each cached
699 file we need to track:
700
701 * Its mtime
702 * The mtimes of all its dependencies
703 * Whether it caused a parse.SkipRecipe exception
704
705 Files causing parsing errors are evicted from the cache.
706
707 """
708 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
709
710
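# An illustrative sketch of how the cooker is expected to drive the Cache
# above; the recipe path, the empty appends list and the "cachedata" object
# are hypothetical placeholders.
def _example_cache_usage(cooker, cachedata):
    cache = init(cooker)
    cached, skipped, virtuals = cache.loadData(
        "/path/to/foo.bb", [], cooker.configuration.data, cachedata)
    cache.sync()   # persist depends_cache once parsing is complete
    return cached, skipped, virtuals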
711class CacheData(object):
712 """
713 The data structures we compile from the cached data
714 """
715
716 def __init__(self, caches_array):
717 self.caches_array = caches_array
718 for cache_class in self.caches_array:
719 if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
720 cache_class.init_cacheData(self)
721
722 # Direct cache variables
723 self.task_queues = {}
724 self.preferred = {}
725 self.tasks = {}
726 # Indirect Cache variables (set elsewhere)
727 self.ignored_dependencies = []
728 self.world_target = set()
729 self.bbfile_priority = {}
730
731 def add_from_recipeinfo(self, fn, info_array):
732 for info in info_array:
733 info.add_cacheData(self, fn)
734
735class MultiProcessCache(object):
736 """
737 BitBake multi-process cache implementation
738
739 Used by the codeparser & file checksum caches
740 """
741
742 def __init__(self):
743 self.cachefile = None
744 self.cachedata = self.create_cachedata()
745 self.cachedata_extras = self.create_cachedata()
746
747 def init_cache(self, d):
748 cachedir = (d.getVar("PERSISTENT_DIR", True) or
749 d.getVar("CACHE", True))
750 if cachedir in [None, '']:
751 return
752 bb.utils.mkdirhier(cachedir)
753 self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
754 logger.debug(1, "Using cache in '%s'", self.cachefile)
755
756 glf = bb.utils.lockfile(self.cachefile + ".lock")
757
758 try:
759 with open(self.cachefile, "rb") as f:
760 p = pickle.Unpickler(f)
761 data, version = p.load()
762 except:
763 bb.utils.unlockfile(glf)
764 return
765
766 bb.utils.unlockfile(glf)
767
768 if version != self.__class__.CACHE_VERSION:
769 return
770
771 self.cachedata = data
772
773 def create_cachedata(self):
774 data = [{}]
775 return data
776
777 def save_extras(self, d):
778 if not self.cachefile:
779 return
780
781 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
782
783 i = os.getpid()
784 lf = None
785 while not lf:
786 lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
787 if not lf or os.path.exists(self.cachefile + "-" + str(i)):
788 if lf:
789 bb.utils.unlockfile(lf)
790 lf = None
791 i = i + 1
792 continue
793
794 with open(self.cachefile + "-" + str(i), "wb") as f:
795 p = pickle.Pickler(f, -1)
796 p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
797
798 bb.utils.unlockfile(lf)
799 bb.utils.unlockfile(glf)
800
801 def merge_data(self, source, dest):
802 for j in range(0,len(dest)):
803 for h in source[j]:
804 if h not in dest[j]:
805 dest[j][h] = source[j][h]
806
807 def save_merge(self, d):
808 if not self.cachefile:
809 return
810
811 glf = bb.utils.lockfile(self.cachefile + ".lock")
812
813 data = self.cachedata
814
815 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
816 f = os.path.join(os.path.dirname(self.cachefile), f)
817 try:
818 with open(f, "rb") as fd:
819 p = pickle.Unpickler(fd)
820 extradata, version = p.load()
821 except (IOError, EOFError):
822 os.unlink(f)
823 continue
824
825 if version != self.__class__.CACHE_VERSION:
826 os.unlink(f)
827 continue
828
829 self.merge_data(extradata, data)
830 os.unlink(f)
831
832 with open(self.cachefile, "wb") as f:
833 p = pickle.Pickler(f, -1)
834 p.dump([data, self.__class__.CACHE_VERSION])
835
836 bb.utils.unlockfile(glf)
837
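# An illustrative sketch, under the assumption that "cache" is one of the
# MultiProcessCache subclasses defined elsewhere in this patch (codeparser
# cache, file checksum cache) and "d" is a datastore:
def _example_multiprocess_cache_flow(cache, d):
    cache.init_cache(d)     # load the shared cache file under a lock
    # ... a worker process records new entries in cache.cachedata_extras ...
    cache.save_extras(d)    # each worker writes its own "<cachefile>-<pid>" file
    cache.save_merge(d)     # the main process merges the per-pid files back in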
diff --git a/bitbake/lib/bb/cache_extra.py b/bitbake/lib/bb/cache_extra.py
new file mode 100644
index 0000000000..83f4959d6c
--- /dev/null
+++ b/bitbake/lib/bb/cache_extra.py
@@ -0,0 +1,75 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# All extra RecipeInfo classes are defined in this file. Currently
5# only Hob (the Image Creator) requests some extra fields, so
6# HobRecipeInfo is defined. It is named HobRecipeInfo because it
7# is introduced by 'hob'. Users may also introduce other
8# RecipeInfo classes or simply use those already defined.
9# In the following patch, this newly defined extra RecipeInfo
10# will be dynamically loaded and used for loading/saving the extra
11# cache fields.
12
13# Copyright (C) 2011, Intel Corporation. All rights reserved.
14
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28from bb.cache import RecipeInfoCommon
29
30class HobRecipeInfo(RecipeInfoCommon):
31 __slots__ = ()
32
33 classname = "HobRecipeInfo"
34 # please override this member with the correct data cache file
35 # such as (bb_cache.dat, bb_extracache_hob.dat)
36 cachefile = "bb_extracache_" + classname +".dat"
37
38 # override this member with the list of extra cache fields
39 # that this class will provide
40 cachefields = ['summary', 'license', 'section',
41 'description', 'homepage', 'bugtracker',
42 'prevision', 'files_info']
43
44 def __init__(self, filename, metadata):
45
46 self.summary = self.getvar('SUMMARY', metadata)
47 self.license = self.getvar('LICENSE', metadata)
48 self.section = self.getvar('SECTION', metadata)
49 self.description = self.getvar('DESCRIPTION', metadata)
50 self.homepage = self.getvar('HOMEPAGE', metadata)
51 self.bugtracker = self.getvar('BUGTRACKER', metadata)
52 self.prevision = self.getvar('PR', metadata)
53 self.files_info = self.getvar('FILES_INFO', metadata)
54
55 @classmethod
56 def init_cacheData(cls, cachedata):
57 # CacheData in Hob RecipeInfo Class
58 cachedata.summary = {}
59 cachedata.license = {}
60 cachedata.section = {}
61 cachedata.description = {}
62 cachedata.homepage = {}
63 cachedata.bugtracker = {}
64 cachedata.prevision = {}
65 cachedata.files_info = {}
66
67 def add_cacheData(self, cachedata, fn):
68 cachedata.summary[fn] = self.summary
69 cachedata.license[fn] = self.license
70 cachedata.section[fn] = self.section
71 cachedata.description[fn] = self.description
72 cachedata.homepage[fn] = self.homepage
73 cachedata.bugtracker[fn] = self.bugtracker
74 cachedata.prevision[fn] = self.prevision
75 cachedata.files_info[fn] = self.files_info
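# An illustrative sketch of how this extra cache is enabled: a UI such as Hob
# requests the HOB_EXTRA_CACHES cooker feature, and cooker.py (later in this
# patch) then imports "bb.cache_extra:HobRecipeInfo" and appends the class to
# cooker.caches_array so every parsed recipe also gains a HobRecipeInfo entry.
def _example_enable_hob_cache(configuration):
    import bb.cooker
    return bb.cooker.BBCooker(configuration,
        featureSet=[bb.cooker.CookerFeatures.HOB_EXTRA_CACHES])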
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
new file mode 100644
index 0000000000..514ff0b1e6
--- /dev/null
+++ b/bitbake/lib/bb/checksum.py
@@ -0,0 +1,90 @@
1# Local file checksum cache implementation
2#
3# Copyright (C) 2012 Intel Corporation
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as
7# published by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License along
15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
18import os
19import stat
20import bb.utils
21import logging
22from bb.cache import MultiProcessCache
23
24logger = logging.getLogger("BitBake.Cache")
25
26try:
27 import cPickle as pickle
28except ImportError:
29 import pickle
30 logger.info("Importing cPickle failed. "
31 "Falling back to a very slow implementation.")
32
33
34# mtime cache (non-persistent)
35# based upon the assumption that files do not change during bitbake run
36class FileMtimeCache(object):
37 cache = {}
38
39 def cached_mtime(self, f):
40 if f not in self.cache:
41 self.cache[f] = os.stat(f)[stat.ST_MTIME]
42 return self.cache[f]
43
44 def cached_mtime_noerror(self, f):
45 if f not in self.cache:
46 try:
47 self.cache[f] = os.stat(f)[stat.ST_MTIME]
48 except OSError:
49 return 0
50 return self.cache[f]
51
52 def update_mtime(self, f):
53 self.cache[f] = os.stat(f)[stat.ST_MTIME]
54 return self.cache[f]
55
56 def clear(self):
57 self.cache.clear()
58
59# Checksum + mtime cache (persistent)
60class FileChecksumCache(MultiProcessCache):
61 cache_file_name = "local_file_checksum_cache.dat"
62 CACHE_VERSION = 1
63
64 def __init__(self):
65 self.mtime_cache = FileMtimeCache()
66 MultiProcessCache.__init__(self)
67
68 def get_checksum(self, f):
69 entry = self.cachedata[0].get(f)
70 cmtime = self.mtime_cache.cached_mtime(f)
71 if entry:
72 (mtime, hashval) = entry
73 if cmtime == mtime:
74 return hashval
75 else:
76 bb.debug(2, "file %s changed mtime, recompute checksum" % f)
77
78 hashval = bb.utils.md5_file(f)
79 self.cachedata_extras[0][f] = (cmtime, hashval)
80 return hashval
81
82 def merge_data(self, source, dest):
83 for h in source[0]:
84 if h in dest:
85 (smtime, _) = source[0][h]
86 (dmtime, _) = dest[0][h]
87 if smtime > dmtime:
88 dest[0][h] = source[0][h]
89 else:
90 dest[0][h] = source[0][h]
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
new file mode 100644
index 0000000000..8b8f91a762
--- /dev/null
+++ b/bitbake/lib/bb/codeparser.py
@@ -0,0 +1,406 @@
1import ast
2import codegen
3import logging
4import os.path
5import bb.utils, bb.data
6from itertools import chain
7from pysh import pyshyacc, pyshlex, sherrors
8from bb.cache import MultiProcessCache
9
10
11logger = logging.getLogger('BitBake.CodeParser')
12
13try:
14 import cPickle as pickle
15except ImportError:
16 import pickle
17 logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
18
19
20def check_indent(codestr):
21 """If the code is indented, add a top level piece of code to 'remove' the indentation"""
22
23 i = 0
24 while codestr[i] in ["\n", "\t", " "]:
25 i = i + 1
26
27 if i == 0:
28 return codestr
29
30 if codestr[i-1] == "\t" or codestr[i-1] == " ":
31 return "if 1:\n" + codestr
32
33 return codestr
34
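# A brief illustration of check_indent(): indented code, such as the body of a
# BitBake anonymous python function, gets a synthetic top-level statement.
#
#   >>> check_indent("    x = 1\n")
#   'if 1:\n    x = 1\n'
#   >>> check_indent("x = 1\n")
#   'x = 1\n'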
35
36# Basically pickle, in python 2.7.3 at least, does badly with data duplication
37# upon pickling and unpickling. Combine this with duplicate objects and things
38# are a mess.
39#
40# When the sets are originally created, python calls intern() on the set keys
41# which significantly improves memory usage. Sadly the pickle/unpickle process
42# doesn't call intern() on the keys and results in the same strings being duplicated
43# in memory. This also means pickle will save the same string multiple times in
44# the cache file.
45#
46# By having shell and python cacheline objects with setstate/getstate, we force
47# the object creation through our own routine where we can call intern (via internSet).
48#
49# We also use hashable frozensets and ensure we use references to these so that
50# duplicates can be removed, both in memory and in the resulting pickled data.
51#
52# By playing these games, the size of the cache file shrinks dramatically
53# meaning faster load times and the reloaded cache files also consume much less
54# memory. Smaller cache files, faster load times and lower memory usage is good.
55#
56# A custom getstate/setstate using tuples is actually worth 15% cachesize by
57# avoiding duplication of the attribute names!
58
59class SetCache(object):
60 def __init__(self):
61 self.setcache = {}
62
63 def internSet(self, items):
64
65 new = []
66 for i in items:
67 new.append(intern(i))
68 s = frozenset(new)
69 if hash(s) in self.setcache:
70 return self.setcache[hash(s)]
71 self.setcache[hash(s)] = s
72 return s
73
74codecache = SetCache()
75
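# A minimal illustration of the interning behaviour described above: equal key
# sets map to one shared frozenset object, in memory and in the pickled cache.
def _example_interning():
    a = codecache.internSet(["DEPENDS", "PN"])
    b = codecache.internSet(["PN", "DEPENDS"])
    assert a is b   # the very same frozenset object is returned both times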
76class pythonCacheLine(object):
77 def __init__(self, refs, execs, contains):
78 self.refs = codecache.internSet(refs)
79 self.execs = codecache.internSet(execs)
80 self.contains = {}
81 for c in contains:
82 self.contains[c] = codecache.internSet(contains[c])
83
84 def __getstate__(self):
85 return (self.refs, self.execs, self.contains)
86
87 def __setstate__(self, state):
88 (refs, execs, contains) = state
89 self.__init__(refs, execs, contains)
90 def __hash__(self):
91 l = (hash(self.refs), hash(self.execs))
92 for c in sorted(self.contains.keys()):
93 l = l + (c, hash(self.contains[c]))
94 return hash(l)
95
96class shellCacheLine(object):
97 def __init__(self, execs):
98 self.execs = codecache.internSet(execs)
99
100 def __getstate__(self):
101 return (self.execs)
102
103 def __setstate__(self, state):
104 (execs) = state
105 self.__init__(execs)
106 def __hash__(self):
107 return hash(self.execs)
108
109class CodeParserCache(MultiProcessCache):
110 cache_file_name = "bb_codeparser.dat"
111 CACHE_VERSION = 7
112
113 def __init__(self):
114 MultiProcessCache.__init__(self)
115 self.pythoncache = self.cachedata[0]
116 self.shellcache = self.cachedata[1]
117 self.pythoncacheextras = self.cachedata_extras[0]
118 self.shellcacheextras = self.cachedata_extras[1]
119
120 # To avoid duplication in the codeparser cache, keep
121 # a lookup of hashes of objects we already have
122 self.pythoncachelines = {}
123 self.shellcachelines = {}
124
125 def newPythonCacheLine(self, refs, execs, contains):
126 cacheline = pythonCacheLine(refs, execs, contains)
127 h = hash(cacheline)
128 if h in self.pythoncachelines:
129 return self.pythoncachelines[h]
130 self.pythoncachelines[h] = cacheline
131 return cacheline
132
133 def newShellCacheLine(self, execs):
134 cacheline = shellCacheLine(execs)
135 h = hash(cacheline)
136 if h in self.shellcachelines:
137 return self.shellcachelines[h]
138 self.shellcachelines[h] = cacheline
139 return cacheline
140
141 def init_cache(self, d):
142 MultiProcessCache.init_cache(self, d)
143
144 # cachedata gets re-assigned in the parent
145 self.pythoncache = self.cachedata[0]
146 self.shellcache = self.cachedata[1]
147
148 def create_cachedata(self):
149 data = [{}, {}]
150 return data
151
152codeparsercache = CodeParserCache()
153
154def parser_cache_init(d):
155 codeparsercache.init_cache(d)
156
157def parser_cache_save(d):
158 codeparsercache.save_extras(d)
159
160def parser_cache_savemerge(d):
161 codeparsercache.save_merge(d)
162
163Logger = logging.getLoggerClass()
164class BufferedLogger(Logger):
165 def __init__(self, name, level=0, target=None):
166 Logger.__init__(self, name)
167 self.setLevel(level)
168 self.buffer = []
169 self.target = target
170
171 def handle(self, record):
172 self.buffer.append(record)
173
174 def flush(self):
175 for record in self.buffer:
176 self.target.handle(record)
177 self.buffer = []
178
179class PythonParser():
180 getvars = (".getVar", ".appendVar", ".prependVar")
181 containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any")
182 execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
183
184 def warn(self, func, arg):
185 """Warn about calls of bitbake APIs which pass a non-literal
186 argument for the variable name, as we're not able to track such
187 a reference.
188 """
189
190 try:
191 funcstr = codegen.to_source(func)
192 argstr = codegen.to_source(arg)
193 except TypeError:
194 self.log.debug(2, 'Failed to convert function and argument to source form')
195 else:
196 self.log.debug(1, self.unhandled_message % (funcstr, argstr))
197
198 def visit_Call(self, node):
199 name = self.called_node_name(node.func)
200 if name and name.endswith(self.getvars) or name in self.containsfuncs:
201 if isinstance(node.args[0], ast.Str):
202 varname = node.args[0].s
203 if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
204 if varname not in self.contains:
205 self.contains[varname] = set()
206 self.contains[varname].add(node.args[1].s)
207 else:
208 self.references.add(node.args[0].s)
209 else:
210 self.warn(node.func, node.args[0])
211 elif name in self.execfuncs:
212 if isinstance(node.args[0], ast.Str):
213 self.var_execs.add(node.args[0].s)
214 else:
215 self.warn(node.func, node.args[0])
216 elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
217 self.execs.add(name)
218
219 def called_node_name(self, node):
220 """Given a called node, return its original string form"""
221 components = []
222 while node:
223 if isinstance(node, ast.Attribute):
224 components.append(node.attr)
225 node = node.value
226 elif isinstance(node, ast.Name):
227 components.append(node.id)
228 return '.'.join(reversed(components))
229 else:
230 break
231
232 def __init__(self, name, log):
233 self.var_execs = set()
234 self.contains = {}
235 self.execs = set()
236 self.references = set()
237 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
238
239 self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
240 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
241
242 def parse_python(self, node):
243 h = hash(str(node))
244
245 if h in codeparsercache.pythoncache:
246 self.references = set(codeparsercache.pythoncache[h].refs)
247 self.execs = set(codeparsercache.pythoncache[h].execs)
248 self.contains = {}
249 for i in codeparsercache.pythoncache[h].contains:
250 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
251 return
252
253 if h in codeparsercache.pythoncacheextras:
254 self.references = set(codeparsercache.pythoncacheextras[h].refs)
255 self.execs = set(codeparsercache.pythoncacheextras[h].execs)
256 self.contains = {}
257 for i in codeparsercache.pythoncacheextras[h].contains:
258 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
259 return
260
261 code = compile(check_indent(str(node)), "<string>", "exec",
262 ast.PyCF_ONLY_AST)
263
264 for n in ast.walk(code):
265 if n.__class__.__name__ == "Call":
266 self.visit_Call(n)
267
268 self.execs.update(self.var_execs)
269
270 codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
271
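# An illustrative use of PythonParser; the variable and function names below
# are hypothetical.
def _example_python_parse():
    p = PythonParser("do_example", logger)
    p.parse_python('d.getVar("FOO", True)\nbb.build.exec_func("bar", d)\n')
    assert p.references == set(["FOO"])   # variables read via d.getVar()
    assert "bar" in p.execs               # functions run via bb.build.exec_func()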
272class ShellParser():
273 def __init__(self, name, log):
274 self.funcdefs = set()
275 self.allexecs = set()
276 self.execs = set()
277 self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
278 self.unhandled_template = "unable to handle non-literal command '%s'"
279 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
280
281 def parse_shell(self, value):
282 """Parse the supplied shell code in a string, returning the external
283 commands it executes.
284 """
285
286 h = hash(str(value))
287
288 if h in codeparsercache.shellcache:
289 self.execs = set(codeparsercache.shellcache[h].execs)
290 return self.execs
291
292 if h in codeparsercache.shellcacheextras:
293 self.execs = set(codeparsercache.shellcacheextras[h].execs)
294 return self.execs
295
296 self._parse_shell(value)
297 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
298
299 codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
300
301 return self.execs
302
303 def _parse_shell(self, value):
304 try:
305 tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
306 except pyshlex.NeedMore:
307 raise sherrors.ShellSyntaxError("Unexpected EOF")
308
309 for token in tokens:
310 self.process_tokens(token)
311
312 def process_tokens(self, tokens):
313 """Process a supplied portion of the syntax tree as returned by
314 pyshyacc.parse.
315 """
316
317 def function_definition(value):
318 self.funcdefs.add(value.name)
319 return [value.body], None
320
321 def case_clause(value):
322 # Element 0 of each item in the case is the list of patterns, and
323 # Element 1 of each item in the case is the list of commands to be
324 # executed when that pattern matches.
325 words = chain(*[item[0] for item in value.items])
326 cmds = chain(*[item[1] for item in value.items])
327 return cmds, words
328
329 def if_clause(value):
330 main = chain(value.cond, value.if_cmds)
331 rest = value.else_cmds
332 if isinstance(rest, tuple) and rest[0] == "elif":
333 return chain(main, if_clause(rest[1]))
334 else:
335 return chain(main, rest)
336
337 def simple_command(value):
338 return None, chain(value.words, (assign[1] for assign in value.assigns))
339
340 token_handlers = {
341 "and_or": lambda x: ((x.left, x.right), None),
342 "async": lambda x: ([x], None),
343 "brace_group": lambda x: (x.cmds, None),
344 "for_clause": lambda x: (x.cmds, x.items),
345 "function_definition": function_definition,
346 "if_clause": lambda x: (if_clause(x), None),
347 "pipeline": lambda x: (x.commands, None),
348 "redirect_list": lambda x: ([x.cmd], None),
349 "subshell": lambda x: (x.cmds, None),
350 "while_clause": lambda x: (chain(x.condition, x.cmds), None),
351 "until_clause": lambda x: (chain(x.condition, x.cmds), None),
352 "simple_command": simple_command,
353 "case_clause": case_clause,
354 }
355
356 for token in tokens:
357 name, value = token
358 try:
359 more_tokens, words = token_handlers[name](value)
360 except KeyError:
361 raise NotImplementedError("Unsupported token type " + name)
362
363 if more_tokens:
364 self.process_tokens(more_tokens)
365
366 if words:
367 self.process_words(words)
368
369 def process_words(self, words):
370 """Process a set of 'words' in pyshyacc parlance, which includes
371 extraction of executed commands from $() blocks, as well as grabbing
372 the command name argument.
373 """
374
375 words = list(words)
376 for word in list(words):
377 wtree = pyshlex.make_wordtree(word[1])
378 for part in wtree:
379 if not isinstance(part, list):
380 continue
381
382 if part[0] in ('`', '$('):
383 command = pyshlex.wordtree_as_string(part[1:-1])
384 self._parse_shell(command)
385
386                    if word[0] in ("cmd_name", "cmd_word"):
387                        if word in words:
388                            words.remove(word)
389
390 usetoken = False
391 for word in words:
392 if word[0] in ("cmd_name", "cmd_word") or \
393 (usetoken and word[0] == "TOKEN"):
394 if "=" in word[1]:
395 usetoken = True
396 continue
397
398 cmd = word[1]
399 if cmd.startswith("$"):
400 self.log.debug(1, self.unhandled_template % cmd)
401 elif cmd == "eval":
402 command = " ".join(word for _, word in words[1:])
403 self._parse_shell(command)
404 else:
405 self.allexecs.add(cmd)
406 break
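# An illustrative use of ShellParser; the shell fragment is hypothetical.
def _example_shell_parse():
    s = ShellParser("do_example", logger)
    execs = s.parse_shell("bbnote starting\noe_runmake install")
    # expected result: execs == set(["bbnote", "oe_runmake"])
    return execs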
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
new file mode 100644
index 0000000000..60f9ac08aa
--- /dev/null
+++ b/bitbake/lib/bb/command.py
@@ -0,0 +1,451 @@
1"""
2BitBake 'Command' module
3
4Provide an interface to interact with the bitbake server through 'commands'
5"""
6
7# Copyright (C) 2006-2007 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22"""
23The bitbake server takes 'commands' from its UI/commandline.
24Commands are either synchronous or asynchronous.
25Async commands return data to the client in the form of events.
26Sync commands must only return data through the function return value
27and must not trigger events, directly or indirectly.
28Commands are queued in a CommandQueue
29"""
30
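# An illustrative sketch of how a UI front-end drives this module; the target
# name below is hypothetical.
def _example_run_commands(command):
    # synchronous: the result comes back in the return value
    value, error = command.runCommand(["getVariable", "MACHINE"])
    # asynchronous: returns immediately; completion is reported later via
    # CommandCompleted/CommandFailed/CommandExit events
    started, error = command.runCommand(["buildTargets",
                                         ["core-image-minimal"], "build"])
    return value, started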
31import bb.event
32import bb.cooker
33
34class CommandCompleted(bb.event.Event):
35 pass
36
37class CommandExit(bb.event.Event):
38 def __init__(self, exitcode):
39 bb.event.Event.__init__(self)
40 self.exitcode = int(exitcode)
41
42class CommandFailed(CommandExit):
43 def __init__(self, message):
44 self.error = message
45 CommandExit.__init__(self, 1)
46
47class CommandError(Exception):
48 pass
49
50class Command:
51 """
52 A queue of asynchronous commands for bitbake
53 """
54 def __init__(self, cooker):
55 self.cooker = cooker
56 self.cmds_sync = CommandsSync()
57 self.cmds_async = CommandsAsync()
58
59 # FIXME Add lock for this
60 self.currentAsyncCommand = None
61
62 def runCommand(self, commandline, ro_only = False):
63 command = commandline.pop(0)
64 if hasattr(CommandsSync, command):
65 # Can run synchronous commands straight away
66 command_method = getattr(self.cmds_sync, command)
67 if ro_only:
68 if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
69                    return None, "Not able to execute non-readonly commands in readonly mode"
70 try:
71 result = command_method(self, commandline)
72 except CommandError as exc:
73 return None, exc.args[0]
74 except Exception:
75 import traceback
76 return None, traceback.format_exc()
77 else:
78 return result, None
79 if self.currentAsyncCommand is not None:
80 return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
81 if command not in CommandsAsync.__dict__:
82 return None, "No such command"
83 self.currentAsyncCommand = (command, commandline)
84 self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
85 return True, None
86
87 def runAsyncCommand(self):
88 try:
89 if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
90 # updateCache will trigger a shutdown of the parser
91 # and then raise BBHandledException triggering an exit
92 self.cooker.updateCache()
93 return False
94 if self.currentAsyncCommand is not None:
95 (command, options) = self.currentAsyncCommand
96 commandmethod = getattr(CommandsAsync, command)
97 needcache = getattr( commandmethod, "needcache" )
98 if needcache and self.cooker.state != bb.cooker.state.running:
99 self.cooker.updateCache()
100 return True
101 else:
102 commandmethod(self.cmds_async, self, options)
103 return False
104 else:
105 return False
106 except KeyboardInterrupt as exc:
107 self.finishAsyncCommand("Interrupted")
108 return False
109 except SystemExit as exc:
110 arg = exc.args[0]
111 if isinstance(arg, basestring):
112 self.finishAsyncCommand(arg)
113 else:
114 self.finishAsyncCommand("Exited with %s" % arg)
115 return False
116 except Exception as exc:
117 import traceback
118 if isinstance(exc, bb.BBHandledException):
119 self.finishAsyncCommand("")
120 else:
121 self.finishAsyncCommand(traceback.format_exc())
122 return False
123
124 def finishAsyncCommand(self, msg=None, code=None):
125 if msg or msg == "":
126 bb.event.fire(CommandFailed(msg), self.cooker.event_data)
127 elif code:
128 bb.event.fire(CommandExit(code), self.cooker.event_data)
129 else:
130 bb.event.fire(CommandCompleted(), self.cooker.event_data)
131 self.currentAsyncCommand = None
132 self.cooker.finishcommand()
133
134class CommandsSync:
135 """
136 A class of synchronous commands
137 These should run quickly so as not to hurt interactive performance.
138    These must not influence any running asynchronous command.
139 """
140
141 def stateShutdown(self, command, params):
142 """
143 Trigger cooker 'shutdown' mode
144 """
145 command.cooker.shutdown(False)
146
147 def stateForceShutdown(self, command, params):
148 """
149 Stop the cooker
150 """
151 command.cooker.shutdown(True)
152
153 def getAllKeysWithFlags(self, command, params):
154 """
155 Returns a dump of the global state. Call with
156 variable flags to be retrieved as params.
157 """
158 flaglist = params[0]
159 return command.cooker.getAllKeysWithFlags(flaglist)
160 getAllKeysWithFlags.readonly = True
161
162 def getVariable(self, command, params):
163 """
164 Read the value of a variable from data
165 """
166 varname = params[0]
167 expand = True
168 if len(params) > 1:
169 expand = (params[1] == "True")
170
171 return command.cooker.data.getVar(varname, expand)
172 getVariable.readonly = True
173
174 def setVariable(self, command, params):
175 """
176 Set the value of variable in data
177 """
178 varname = params[0]
179 value = str(params[1])
180 command.cooker.data.setVar(varname, value)
181
182 def setConfig(self, command, params):
183 """
184 Set the value of variable in configuration
185 """
186 varname = params[0]
187 value = str(params[1])
188 setattr(command.cooker.configuration, varname, value)
189
190 def enableDataTracking(self, command, params):
191 """
192 Enable history tracking for variables
193 """
194 command.cooker.enableDataTracking()
195
196 def disableDataTracking(self, command, params):
197 """
198 Disable history tracking for variables
199 """
200 command.cooker.disableDataTracking()
201
202 def setPrePostConfFiles(self, command, params):
203 prefiles = params[0].split()
204 postfiles = params[1].split()
205 command.cooker.configuration.prefile = prefiles
206 command.cooker.configuration.postfile = postfiles
207
208 def getCpuCount(self, command, params):
209 """
210 Get the CPU count on the bitbake server
211 """
212 return bb.utils.cpu_count()
213 getCpuCount.readonly = True
214
215 def matchFile(self, command, params):
216 fMatch = params[0]
217 return command.cooker.matchFile(fMatch)
218
219 def generateNewImage(self, command, params):
220 image = params[0]
221 base_image = params[1]
222 package_queue = params[2]
223 timestamp = params[3]
224 description = params[4]
225 return command.cooker.generateNewImage(image, base_image,
226 package_queue, timestamp, description)
227
228 def ensureDir(self, command, params):
229 directory = params[0]
230 bb.utils.mkdirhier(directory)
231
232 def setVarFile(self, command, params):
233 """
234 Save a variable in a file; used for saving in a configuration file
235 """
236 var = params[0]
237 val = params[1]
238 default_file = params[2]
239 op = params[3]
240 command.cooker.modifyConfigurationVar(var, val, default_file, op)
241
242 def removeVarFile(self, command, params):
243 """
244 Remove a variable declaration from a file
245 """
246 var = params[0]
247 command.cooker.removeConfigurationVar(var)
248
249 def createConfigFile(self, command, params):
250 """
251 Create an extra configuration file
252 """
253 name = params[0]
254 command.cooker.createConfigFile(name)
255
256 def setEventMask(self, command, params):
257 handlerNum = params[0]
258 llevel = params[1]
259 debug_domains = params[2]
260 mask = params[3]
261 return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
262
263 def setFeatures(self, command, params):
264 """
265 Set the cooker features to include the passed list of features
266 """
267 features = params[0]
268 command.cooker.setFeatures(features)
269
270 # although we change the internal state of the cooker, this is transparent since
271 # we always take and leave the cooker in state.initial
272 setFeatures.readonly = True
273
274 def updateConfig(self, command, params):
275 options = params[0]
276 command.cooker.updateConfigOpts(options)
277
278class CommandsAsync:
279 """
280 A class of asynchronous commands
281 These functions communicate via generated events.
282 Any function that requires metadata parsing should be here.
283 """
284
285 def buildFile(self, command, params):
286 """
287 Build a single specified .bb file
288 """
289 bfile = params[0]
290 task = params[1]
291
292 command.cooker.buildFile(bfile, task)
293 buildFile.needcache = False
294
295 def buildTargets(self, command, params):
296 """
297 Build a set of targets
298 """
299 pkgs_to_build = params[0]
300 task = params[1]
301
302 command.cooker.buildTargets(pkgs_to_build, task)
303 buildTargets.needcache = True
304
305 def generateDepTreeEvent(self, command, params):
306 """
307 Generate an event containing the dependency information
308 """
309 pkgs_to_build = params[0]
310 task = params[1]
311
312 command.cooker.generateDepTreeEvent(pkgs_to_build, task)
313 command.finishAsyncCommand()
314 generateDepTreeEvent.needcache = True
315
316 def generateDotGraph(self, command, params):
317 """
318 Dump dependency information to disk as .dot files
319 """
320 pkgs_to_build = params[0]
321 task = params[1]
322
323 command.cooker.generateDotGraphFiles(pkgs_to_build, task)
324 command.finishAsyncCommand()
325 generateDotGraph.needcache = True
326
327 def generateTargetsTree(self, command, params):
328 """
329 Generate a tree of buildable targets.
330 If klass is provided ensure all recipes that inherit the class are
331 included in the package list.
332 If pkg_list provided use that list (plus any extras brought in by
333 klass) rather than generating a tree for all packages.
334 """
335 klass = params[0]
336 pkg_list = params[1]
337
338 command.cooker.generateTargetsTree(klass, pkg_list)
339 command.finishAsyncCommand()
340 generateTargetsTree.needcache = True
341
342 def findCoreBaseFiles(self, command, params):
343 """
344        Find certain files in the COREBASE directory, e.g. layers
345 """
346 subdir = params[0]
347 filename = params[1]
348
349 command.cooker.findCoreBaseFiles(subdir, filename)
350 command.finishAsyncCommand()
351 findCoreBaseFiles.needcache = False
352
353 def findConfigFiles(self, command, params):
354 """
355 Find config files which provide appropriate values
356        for the passed configuration variable, e.g. MACHINE
357 """
358 varname = params[0]
359
360 command.cooker.findConfigFiles(varname)
361 command.finishAsyncCommand()
362 findConfigFiles.needcache = False
363
364 def findFilesMatchingInDir(self, command, params):
365 """
366 Find implementation files matching the specified pattern
367 in the requested subdirectory of a BBPATH
368 """
369 pattern = params[0]
370 directory = params[1]
371
372 command.cooker.findFilesMatchingInDir(pattern, directory)
373 command.finishAsyncCommand()
374 findFilesMatchingInDir.needcache = False
375
376 def findConfigFilePath(self, command, params):
377 """
378 Find the path of the requested configuration file
379 """
380 configfile = params[0]
381
382 command.cooker.findConfigFilePath(configfile)
383 command.finishAsyncCommand()
384 findConfigFilePath.needcache = False
385
386 def showVersions(self, command, params):
387 """
388 Show the currently selected versions
389 """
390 command.cooker.showVersions()
391 command.finishAsyncCommand()
392 showVersions.needcache = True
393
394 def showEnvironmentTarget(self, command, params):
395 """
396 Print the environment of a target recipe
397 (needs the cache to work out which recipe to use)
398 """
399 pkg = params[0]
400
401 command.cooker.showEnvironment(None, pkg)
402 command.finishAsyncCommand()
403 showEnvironmentTarget.needcache = True
404
405 def showEnvironment(self, command, params):
406 """
407 Print the standard environment
408 or if specified the environment for a specified recipe
409 """
410 bfile = params[0]
411
412 command.cooker.showEnvironment(bfile)
413 command.finishAsyncCommand()
414 showEnvironment.needcache = False
415
416 def parseFiles(self, command, params):
417 """
418 Parse the .bb files
419 """
420 command.cooker.updateCache()
421 command.finishAsyncCommand()
422 parseFiles.needcache = True
423
424 def compareRevisions(self, command, params):
425 """
426        Compare the known source revisions against the latest upstream revisions
427 """
428 if bb.fetch.fetcher_compare_revisions(command.cooker.data):
429 command.finishAsyncCommand(code=1)
430 else:
431 command.finishAsyncCommand()
432 compareRevisions.needcache = True
433
434 def triggerEvent(self, command, params):
435 """
436 Trigger a certain event
437 """
438 event = params[0]
439 bb.event.fire(eval(event), command.cooker.data)
440 command.currentAsyncCommand = None
441 triggerEvent.needcache = False
442
443 def resetCooker(self, command, params):
444 """
445 Reset the cooker to its initial state, thus forcing a reparse for
446 any async command that has the needcache property set to True
447 """
448 command.cooker.reset()
449 command.finishAsyncCommand()
450 resetCooker.needcache = False
451
diff --git a/bitbake/lib/bb/compat.py b/bitbake/lib/bb/compat.py
new file mode 100644
index 0000000000..de1923d28a
--- /dev/null
+++ b/bitbake/lib/bb/compat.py
@@ -0,0 +1,6 @@
1"""Code pulled from future python versions, here for compatibility"""
2
3from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
4from functools import total_ordering
5
6
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
new file mode 100644
index 0000000000..879d2ba065
--- /dev/null
+++ b/bitbake/lib/bb/cooker.py
@@ -0,0 +1,2025 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from cStringIO import StringIO
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
39import Queue
40import signal
41import prserv.serv
42import pyinotify
43
44logger = logging.getLogger("BitBake")
45collectlog = logging.getLogger("BitBake.Collection")
46buildlog = logging.getLogger("BitBake.Build")
47parselog = logging.getLogger("BitBake.Parsing")
48providerlog = logging.getLogger("BitBake.Provider")
49
50class NoSpecificMatch(bb.BBHandledException):
51 """
52 Exception raised when no or multiple file matches are found
53 """
54
55class NothingToBuild(Exception):
56 """
57 Exception raised when there is nothing to build
58 """
59
60class CollectionError(bb.BBHandledException):
61 """
62 Exception raised when layer configuration is incorrect
63 """
64
65class state:
66 initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
67
68
69class SkippedPackage:
70 def __init__(self, info = None, reason = None):
71 self.pn = None
72 self.skipreason = None
73 self.provides = None
74 self.rprovides = None
75
76 if info:
77 self.pn = info.pn
78 self.skipreason = info.skipreason
79 self.provides = info.provides
80 self.rprovides = info.rprovides
81 elif reason:
82 self.skipreason = reason
83
84
85class CookerFeatures(object):
86 _feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
87
88 def __init__(self):
89 self._features=set()
90
91 def setFeature(self, f):
92 # validate we got a request for a feature we support
93 if f not in CookerFeatures._feature_list:
94 return
95 self._features.add(f)
96
97 def __contains__(self, f):
98 return f in self._features
99
100 def __iter__(self):
101 return self._features.__iter__()
102
103 def next(self):
104 return self._features.next()
105
106
107#============================================================================#
108# BBCooker
109#============================================================================#
110class BBCooker:
111 """
112 Manages one bitbake build run
113 """
114
115 def __init__(self, configuration, featureSet = []):
116 self.recipecache = None
117 self.skiplist = {}
118 self.featureset = CookerFeatures()
119 for f in featureSet:
120 self.featureset.setFeature(f)
121
122 self.configuration = configuration
123
124 self.configwatcher = pyinotify.WatchManager()
125 self.configwatcher.bbseen = []
126 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
127 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
128 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
129 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
130 self.watcher = pyinotify.WatchManager()
131 self.watcher.bbseen = []
132 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
133
134
135 self.initConfigurationData()
136
137 self.inotify_modified_files = []
138
139 def _process_inotify_updates(server, notifier_list, abort):
140 for n in notifier_list:
141 if n.check_events(timeout=0):
142                    # read notified events and enqueue them
143 n.read_events()
144 n.process_events()
145 return 1.0
146
147 self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
148
149 self.baseconfig_valid = True
150 self.parsecache_valid = False
151
152 # Take a lock so only one copy of bitbake can run against a given build
153 # directory at a time
154 lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
155 self.lock = bb.utils.lockfile(lockfile, False, False)
156 if not self.lock:
157 bb.fatal("Only one copy of bitbake should be run against a build directory")
158 try:
159 self.lock.seek(0)
160 self.lock.truncate()
161 if len(configuration.interface) >= 2:
162 self.lock.write("%s:%s\n" % (configuration.interface[0], configuration.interface[1]));
163 self.lock.flush()
164 except:
165 pass
166
167 # TOSTOP must not be set or our children will hang when they output
168 fd = sys.stdout.fileno()
169 if os.isatty(fd):
170 import termios
171 tcattr = termios.tcgetattr(fd)
172 if tcattr[3] & termios.TOSTOP:
173 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
174 tcattr[3] = tcattr[3] & ~termios.TOSTOP
175 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
176
177 self.command = bb.command.Command(self)
178 self.state = state.initial
179
180 self.parser = None
181
182 signal.signal(signal.SIGTERM, self.sigterm_exception)
183 # Let SIGHUP exit as SIGTERM
184 signal.signal(signal.SIGHUP, self.sigterm_exception)
185
186 def config_notifications(self, event):
187 if not event.path in self.inotify_modified_files:
188 self.inotify_modified_files.append(event.path)
189 self.baseconfig_valid = False
190
191 def notifications(self, event):
192 if not event.path in self.inotify_modified_files:
193 self.inotify_modified_files.append(event.path)
194 self.parsecache_valid = False
195
196 def add_filewatch(self, deps, watcher=None):
197 if not watcher:
198 watcher = self.watcher
199 for i in deps:
200 f = i[0]
201 if f in watcher.bbseen:
202 continue
203 watcher.bbseen.append(f)
204 while True:
205                # We try to add watches for files that don't exist yet but which, if they
206                # did, would influence the parser. The parent directory of such a file may
207                # not exist either, in which case we watch the nearest existing parent for changes.
208 try:
209 watcher.add_watch(f, self.watchmask, quiet=False)
210 break
211 except pyinotify.WatchManagerError as e:
212 if 'ENOENT' in str(e):
213 f = os.path.dirname(f)
214 watcher.bbseen.append(f)
215 continue
216 raise
217
218 def sigterm_exception(self, signum, stackframe):
219 if signum == signal.SIGTERM:
220            bb.warn("Cooker received SIGTERM, shutting down...")
221        elif signum == signal.SIGHUP:
222            bb.warn("Cooker received SIGHUP, shutting down...")
223 self.state = state.forceshutdown
224
225 def setFeatures(self, features):
226 # we only accept a new feature set if we're in state initial, so we can reset without problems
227 if self.state != state.initial:
228 raise Exception("Illegal state for feature set change")
229 original_featureset = list(self.featureset)
230 for feature in features:
231 self.featureset.setFeature(feature)
232 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
233 if (original_featureset != list(self.featureset)):
234 self.reset()
235
236 def initConfigurationData(self):
237
238 self.state = state.initial
239 self.caches_array = []
240
241 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
242 self.enableDataTracking()
243
244 all_extra_cache_names = []
245 # We hardcode all known cache types in a single place, here.
246 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
247 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
248
249 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
250
251 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
252 # This is the entry point, no further check needed!
253 for var in caches_name_array:
254 try:
255 module_name, cache_name = var.split(':')
256 module = __import__(module_name, fromlist=(cache_name,))
257 self.caches_array.append(getattr(module, cache_name))
258 except ImportError as exc:
259 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
260 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
261
262 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
263 self.databuilder.parseBaseConfiguration()
264 self.data = self.databuilder.data
265 self.data_hash = self.databuilder.data_hash
266
267
268 # we log all events to a file if so directed
269 if self.configuration.writeeventlog:
270 import json, pickle
271 DEFAULT_EVENTFILE = self.configuration.writeeventlog
272 class EventLogWriteHandler():
273
274 class EventWriter():
275 def __init__(self, cooker):
276 self.file_inited = None
277 self.cooker = cooker
278 self.event_queue = []
279
280 def init_file(self):
281 try:
282 # delete the old log
283 os.remove(DEFAULT_EVENTFILE)
284 except:
285 pass
286
287 # write current configuration data
288 with open(DEFAULT_EVENTFILE, "w") as f:
289 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
290
291 def write_event(self, event):
292 with open(DEFAULT_EVENTFILE, "a") as f:
293 try:
294 f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
295 except Exception as e:
296 import traceback
297 print(e, traceback.format_exc(e))
298
299
300 def send(self, event):
301 event_class = event.__module__ + "." + event.__class__.__name__
302
303 # init on bb.event.BuildStarted
304 if self.file_inited is None:
305 if event_class == "bb.event.BuildStarted":
306 self.init_file()
307 self.file_inited = True
308
309 # write pending events
310 for e in self.event_queue:
311 self.write_event(e)
312
313 # also write the current event
314 self.write_event(event)
315
316 else:
317 # queue all events until the file is inited
318 self.event_queue.append(event)
319
320 else:
321 # we have the file, just write the event
322 self.write_event(event)
323
324 # set our handler's event processor
325 event = EventWriter(self) # self is the cooker here
326
327
328 # set up cooker features for this mock UI handler
329
330 # we need to write the dependency tree in the log
331 self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
332 # register the log file writer as UI Handler
333 bb.event.register_UIHhandler(EventLogWriteHandler())
334
335
336 #
337 # Special updated configuration we use for firing events
338 #
339 self.event_data = bb.data.createCopy(self.data)
340 bb.data.update_data(self.event_data)
341 bb.parse.init_parser(self.event_data)
342
343 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
344 self.disableDataTracking()
345
346 def enableDataTracking(self):
347 self.configuration.tracking = True
348 if hasattr(self, "data"):
349 self.data.enableTracking()
350
351 def disableDataTracking(self):
352 self.configuration.tracking = False
353 if hasattr(self, "data"):
354 self.data.disableTracking()
355
356 def modifyConfigurationVar(self, var, val, default_file, op):
357 if op == "append":
358 self.appendConfigurationVar(var, val, default_file)
359 elif op == "set":
360 self.saveConfigurationVar(var, val, default_file, "=")
361 elif op == "earlyAssign":
362 self.saveConfigurationVar(var, val, default_file, "?=")
363
364
365 def appendConfigurationVar(self, var, val, default_file):
366 #add append var operation to the end of default_file
367 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
368
369 total = "#added by hob"
370 total += "\n%s += \"%s\"\n" % (var, val)
371
372 with open(default_file, 'a') as f:
373 f.write(total)
374
375 #add to history
376         loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
377 self.data.appendVar(var, val, **loginfo)
378
379 def saveConfigurationVar(self, var, val, default_file, op):
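        # Comment out or replace any existing assignments of 'var' in config
        # files under TOPDIR. If no previously hob-added assignment could be
        # replaced in place, append a fresh assignment to default_file and
        # record the change in the variable history.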
380
381 replaced = False
382 #do not save if nothing changed
383 if str(val) == self.data.getVar(var):
384 return
385
386 conf_files = self.data.varhistory.get_variable_files(var)
387
388 #format the value when it is a list
389 if isinstance(val, list):
390 listval = ""
391 for value in val:
392 listval += "%s " % value
393 val = listval
394
395 topdir = self.data.getVar("TOPDIR")
396
397         #comment out or replace any existing assignment of var
398 for conf_file in conf_files:
399 if topdir in conf_file:
400 with open(conf_file, 'r') as f:
401 contents = f.readlines()
402
403 lines = self.data.varhistory.get_variable_lines(var, conf_file)
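                    # For each recorded history line, walk the file contents to find
                    # where the assignment of 'var' ending at that line begins, so the
                    # whole (possibly multi-line) assignment can be replaced or
                    # commented out below.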
404 for line in lines:
405 total = ""
406 i = 0
407 for c in contents:
408 total += c
409 i = i + 1
410 if i==int(line):
411 end_index = len(total)
412 index = total.rfind(var, 0, end_index)
413
414 begin_line = total.count("\n",0,index)
415 end_line = int(line)
416
417                             #check if the variable was saved before in the same way:
418                             #if so, replace the line where the variable was declared,
419                             #otherwise comment out the old assignment
420 if contents[begin_line-1]== "#added by hob\n":
421 contents[begin_line] = "%s %s \"%s\"\n" % (var, op, val)
422 replaced = True
423 else:
424 for ii in range(begin_line, end_line):
425 contents[ii] = "#" + contents[ii]
426
427 with open(conf_file, 'w') as f:
428 f.writelines(contents)
429
430 if replaced == False:
431 #remove var from history
432 self.data.varhistory.del_var_history(var)
433
434 #add var to the end of default_file
435 default_file = bb.cookerdata.findConfigFile(default_file, self.data)
436
437             #add the variable on a single line so it is easy to replace next time
438 total = "\n#added by hob"
439 total += "\n%s %s \"%s\"\n" % (var, op, val)
440
441 with open(default_file, 'a') as f:
442 f.write(total)
443
444 #add to history
445             loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
446 self.data.setVar(var, val, **loginfo)
447
448 def removeConfigurationVar(self, var):
449 conf_files = self.data.varhistory.get_variable_files(var)
450 topdir = self.data.getVar("TOPDIR")
451
452 for conf_file in conf_files:
453 if topdir in conf_file:
454 with open(conf_file, 'r') as f:
455 contents = f.readlines()
456
457 lines = self.data.varhistory.get_variable_lines(var, conf_file)
458 for line in lines:
459 total = ""
460 i = 0
461 for c in contents:
462 total += c
463 i = i + 1
464 if i==int(line):
465 end_index = len(total)
466 index = total.rfind(var, 0, end_index)
467
468 begin_line = total.count("\n",0,index)
469
470 #check if the variable was saved before in the same way
471 if contents[begin_line-1]== "#added by hob\n":
472 contents[begin_line-1] = contents[begin_line] = "\n"
473 else:
474 contents[begin_line] = "\n"
475 #remove var from history
476 self.data.varhistory.del_var_history(var, conf_file, line)
477 #remove variable
478 self.data.delVar(var)
479
480 with open(conf_file, 'w') as f:
481 f.writelines(contents)
482
483 def createConfigFile(self, name):
484 path = os.getcwd()
485 confpath = os.path.join(path, "conf", name)
486 open(confpath, 'w').close()
487
488 def parseConfiguration(self):
489 # Set log file verbosity
490 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
491 if verboselogs:
492 bb.msg.loggerVerboseLogs = True
493
494 # Change nice level if we're asked to
495 nice = self.data.getVar("BB_NICE_LEVEL", True)
496 if nice:
497 curnice = os.nice(0)
498 nice = int(nice) - curnice
499 buildlog.verbose("Renice to %s " % os.nice(nice))
500
501 if self.recipecache:
502 del self.recipecache
503 self.recipecache = bb.cache.CacheData(self.caches_array)
504
505 self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
506
507 def updateConfigOpts(self,options):
508 for o in options:
509 setattr(self.configuration, o, options[o])
510
511 def runCommands(self, server, data, abort):
512 """
513 Run any queued asynchronous command
514 This is done by the idle handler so it runs in true context rather than
515 tied to any UI.
516 """
517
518 return self.command.runAsyncCommand()
519
520 def showVersions(self):
521
522 pkg_pn = self.recipecache.pkg_pn
523 (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecache, pkg_pn)
524
525 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
526 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
527
528 for p in sorted(pkg_pn):
529 pref = preferred_versions[p]
530 latest = latest_versions[p]
531
532 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
533 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
534
535 if pref == latest:
536 prefstr = ""
537
538 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
539
540 def showEnvironment(self, buildfile = None, pkgs_to_build = []):
541 """
542 Show the outer or per-recipe environment
543 """
544 fn = None
545 envdata = None
546
547 if buildfile:
548 # Parse the configuration here. We need to do it explicitly here since
549 # this showEnvironment() code path doesn't use the cache
550 self.parseConfiguration()
551
552 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
553 fn = self.matchFile(fn)
554 fn = bb.cache.Cache.realfn2virtual(fn, cls)
555 elif len(pkgs_to_build) == 1:
556 ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
557 if pkgs_to_build[0] in set(ignore.split()):
558 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
559
560 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)
561
562 targetid = taskdata.getbuild_id(pkgs_to_build[0])
563 fnid = taskdata.build_targets[targetid][0]
564 fn = taskdata.fn_index[fnid]
565 else:
566 envdata = self.data
567
568 if fn:
569 try:
570 envdata = bb.cache.Cache.loadDataFull(fn, self.collection.get_file_appends(fn), self.data)
571 except Exception as e:
572 parselog.exception("Unable to read %s", fn)
573 raise
574
575 # Display history
576 with closing(StringIO()) as env:
577 self.data.inchistory.emit(env)
578 logger.plain(env.getvalue())
579
580 # emit variables and shell functions
581 data.update_data(envdata)
582 with closing(StringIO()) as env:
583 data.emit_env(env, envdata, True)
584 logger.plain(env.getvalue())
585
586         # emit the metadata which isn't valid shell
587 data.expandKeys(envdata)
588 for e in envdata.keys():
589 if data.getVarFlag( e, 'python', envdata ):
590 logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
591
592
593 def buildTaskData(self, pkgs_to_build, task, abort):
594 """
595 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
596 """
597 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
598
599 # A task of None means use the default task
600 if task is None:
601 task = self.configuration.cmd
602
603 fulltargetlist = self.checkPackages(pkgs_to_build)
604
605 localdata = data.createCopy(self.data)
606 bb.data.update_data(localdata)
607 bb.data.expandKeys(localdata)
608 taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)
609
610 current = 0
611 runlist = []
612 for k in fulltargetlist:
613 ktask = task
614 if ":do_" in k:
615 k2 = k.split(":do_")
616 k = k2[0]
617 ktask = k2[1]
618 taskdata.add_provider(localdata, self.recipecache, k)
619 current += 1
620 runlist.append([k, "do_%s" % ktask])
621 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
622 taskdata.add_unresolved(localdata, self.recipecache)
623 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
624 return taskdata, runlist, fulltargetlist
625
626 def prepareTreeData(self, pkgs_to_build, task):
627 """
628 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
629 """
630
631 # We set abort to False here to prevent unbuildable targets raising
632 # an exception when we're just generating data
633 taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
634
635 return runlist, taskdata
636
637 ######## WARNING : this function requires cache_extra to be enabled ########
638
639 def generateTaskDepTreeData(self, pkgs_to_build, task):
640 """
641 Create a dependency graph of pkgs_to_build including reverse dependency
642 information.
643 """
644 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
645 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
646 rq.rqdata.prepare()
647 return self.buildDependTree(rq, taskdata)
648
649
650 def buildDependTree(self, rq, taskdata):
651 seen_fnids = []
652 depend_tree = {}
653 depend_tree["depends"] = {}
654 depend_tree["tdepends"] = {}
655 depend_tree["pn"] = {}
656 depend_tree["rdepends-pn"] = {}
657 depend_tree["packages"] = {}
658 depend_tree["rdepends-pkg"] = {}
659 depend_tree["rrecs-pkg"] = {}
660 depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
661
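        # Walk every task in the runqueue, filling in recipe-level data ("pn"),
        # task-to-task dependencies ("tdepends") and, once per recipe, the
        # build-time ("depends") and runtime ("rdepends-*", "rrecs-*") data.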
662 for task in xrange(len(rq.rqdata.runq_fnid)):
663 taskname = rq.rqdata.runq_task[task]
664 fnid = rq.rqdata.runq_fnid[task]
665 fn = taskdata.fn_index[fnid]
666 pn = self.recipecache.pkg_fn[fn]
667 version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
668 if pn not in depend_tree["pn"]:
669 depend_tree["pn"][pn] = {}
670 depend_tree["pn"][pn]["filename"] = fn
671 depend_tree["pn"][pn]["version"] = version
672 depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
673
674 # if we have extra caches, list all attributes they bring in
675 extra_info = []
676 for cache_class in self.caches_array:
677 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
678 cachefields = getattr(cache_class, 'cachefields', [])
679 extra_info = extra_info + cachefields
680
681 # for all attributes stored, add them to the dependency tree
682 for ei in extra_info:
683 depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
684
685
686 for dep in rq.rqdata.runq_depends[task]:
687 depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
688 deppn = self.recipecache.pkg_fn[depfn]
689 dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
690 if not dotname in depend_tree["tdepends"]:
691 depend_tree["tdepends"][dotname] = []
692 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
693 if fnid not in seen_fnids:
694 seen_fnids.append(fnid)
695 packages = []
696
697 depend_tree["depends"][pn] = []
698 for dep in taskdata.depids[fnid]:
699 depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
700
701 depend_tree["rdepends-pn"][pn] = []
702 for rdep in taskdata.rdepids[fnid]:
703 depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
704
705 rdepends = self.recipecache.rundeps[fn]
706 for package in rdepends:
707 depend_tree["rdepends-pkg"][package] = []
708 for rdepend in rdepends[package]:
709 depend_tree["rdepends-pkg"][package].append(rdepend)
710 packages.append(package)
711
712 rrecs = self.recipecache.runrecs[fn]
713 for package in rrecs:
714 depend_tree["rrecs-pkg"][package] = []
715 for rdepend in rrecs[package]:
716 depend_tree["rrecs-pkg"][package].append(rdepend)
717 if not package in packages:
718 packages.append(package)
719
720 for package in packages:
721 if package not in depend_tree["packages"]:
722 depend_tree["packages"][package] = {}
723 depend_tree["packages"][package]["pn"] = pn
724 depend_tree["packages"][package]["filename"] = fn
725 depend_tree["packages"][package]["version"] = version
726
727 return depend_tree
728
729 ######## WARNING : this function requires cache_extra to be enabled ########
730 def generatePkgDepTreeData(self, pkgs_to_build, task):
731 """
732 Create a dependency tree of pkgs_to_build, returning the data.
733 """
734 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
735 tasks_fnid = []
736 if len(taskdata.tasks_name) != 0:
737 for task in xrange(len(taskdata.tasks_name)):
738 tasks_fnid.append(taskdata.tasks_fnid[task])
739
740 seen_fnids = []
741 depend_tree = {}
742 depend_tree["depends"] = {}
743 depend_tree["pn"] = {}
744 depend_tree["rdepends-pn"] = {}
745 depend_tree["rdepends-pkg"] = {}
746 depend_tree["rrecs-pkg"] = {}
747
748 # if we have extra caches, list all attributes they bring in
749 extra_info = []
750 for cache_class in self.caches_array:
751 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
752 cachefields = getattr(cache_class, 'cachefields', [])
753 extra_info = extra_info + cachefields
754
755 for task in xrange(len(tasks_fnid)):
756 fnid = tasks_fnid[task]
757 fn = taskdata.fn_index[fnid]
758 pn = self.recipecache.pkg_fn[fn]
759
760 if pn not in depend_tree["pn"]:
761 depend_tree["pn"][pn] = {}
762 depend_tree["pn"][pn]["filename"] = fn
763 version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
764 depend_tree["pn"][pn]["version"] = version
765 rdepends = self.recipecache.rundeps[fn]
766 rrecs = self.recipecache.runrecs[fn]
767 depend_tree["pn"][pn]["inherits"] = self.recipecache.inherits.get(fn, None)
768
769 # for all extra attributes stored, add them to the dependency tree
770 for ei in extra_info:
771 depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
772
773 if fnid not in seen_fnids:
774 seen_fnids.append(fnid)
775
776 depend_tree["depends"][pn] = []
777 for dep in taskdata.depids[fnid]:
778 item = taskdata.build_names_index[dep]
779 pn_provider = ""
780 targetid = taskdata.getbuild_id(item)
781 if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
782 id = taskdata.build_targets[targetid][0]
783 fn_provider = taskdata.fn_index[id]
784 pn_provider = self.recipecache.pkg_fn[fn_provider]
785 else:
786 pn_provider = item
787 depend_tree["depends"][pn].append(pn_provider)
788
789 depend_tree["rdepends-pn"][pn] = []
790 for rdep in taskdata.rdepids[fnid]:
791 item = taskdata.run_names_index[rdep]
792 pn_rprovider = ""
793 targetid = taskdata.getrun_id(item)
794 if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
795 id = taskdata.run_targets[targetid][0]
796 fn_rprovider = taskdata.fn_index[id]
797 pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
798 else:
799 pn_rprovider = item
800 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
801
802 depend_tree["rdepends-pkg"].update(rdepends)
803 depend_tree["rrecs-pkg"].update(rrecs)
804
805 return depend_tree
806
807 def generateDepTreeEvent(self, pkgs_to_build, task):
808 """
809 Create a task dependency graph of pkgs_to_build.
810 Generate an event with the result
811 """
812 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
813 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
814
815 def generateDotGraphFiles(self, pkgs_to_build, task):
816 """
817 Create a task dependency graph of pkgs_to_build.
818 Save the result to a set of .dot files.
819 """
820
821 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
822
823 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
824 depends_file = file('pn-depends.dot', 'w' )
825 buildlist_file = file('pn-buildlist', 'w' )
826 print("digraph depends {", file=depends_file)
827 for pn in depgraph["pn"]:
828 fn = depgraph["pn"][pn]["filename"]
829 version = depgraph["pn"][pn]["version"]
830 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
831 print("%s" % pn, file=buildlist_file)
832 buildlist_file.close()
833 logger.info("PN build list saved to 'pn-buildlist'")
834 for pn in depgraph["depends"]:
835 for depend in depgraph["depends"][pn]:
836 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
837 for pn in depgraph["rdepends-pn"]:
838 for rdepend in depgraph["rdepends-pn"][pn]:
839 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
840 print("}", file=depends_file)
841 logger.info("PN dependencies saved to 'pn-depends.dot'")
842
843 depends_file = file('package-depends.dot', 'w' )
844 print("digraph depends {", file=depends_file)
845 for package in depgraph["packages"]:
846 pn = depgraph["packages"][package]["pn"]
847 fn = depgraph["packages"][package]["filename"]
848 version = depgraph["packages"][package]["version"]
849 if package == pn:
850 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
851 else:
852 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
853 for depend in depgraph["depends"][pn]:
854 print('"%s" -> "%s"' % (package, depend), file=depends_file)
855 for package in depgraph["rdepends-pkg"]:
856 for rdepend in depgraph["rdepends-pkg"][package]:
857 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
858 for package in depgraph["rrecs-pkg"]:
859 for rdepend in depgraph["rrecs-pkg"][package]:
860 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
861 print("}", file=depends_file)
862 logger.info("Package dependencies saved to 'package-depends.dot'")
863
864 tdepends_file = file('task-depends.dot', 'w' )
865 print("digraph depends {", file=tdepends_file)
866 for task in depgraph["tdepends"]:
867 (pn, taskname) = task.rsplit(".", 1)
868 fn = depgraph["pn"][pn]["filename"]
869 version = depgraph["pn"][pn]["version"]
870 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
871 for dep in depgraph["tdepends"][task]:
872 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
873 print("}", file=tdepends_file)
874 logger.info("Task dependencies saved to 'task-depends.dot'")
875
876 def show_appends_with_no_recipes( self ):
877 appends_without_recipes = [self.collection.appendlist[recipe]
878 for recipe in self.collection.appendlist
879 if recipe not in self.collection.appliedappendlist]
880 if appends_without_recipes:
881 appendlines = (' %s' % append
882 for appends in appends_without_recipes
883 for append in appends)
884 msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
885 warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
886 self.data, False) or "no"
887 if warn_only.lower() in ("1", "yes", "true"):
888 bb.warn(msg)
889 else:
890 bb.fatal(msg)
891
892 def handlePrefProviders(self):
893
894 localdata = data.createCopy(self.data)
895 bb.data.update_data(localdata)
896 bb.data.expandKeys(localdata)
897
898 # Handle PREFERRED_PROVIDERS
899 for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
900 try:
901 (providee, provider) = p.split(':')
902 except:
903 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
904 continue
905 if providee in self.recipecache.preferred and self.recipecache.preferred[providee] != provider:
906 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecache.preferred[providee])
907 self.recipecache.preferred[providee] = provider
908
909 def findCoreBaseFiles(self, subdir, configfile):
910 corebase = self.data.getVar('COREBASE', True) or ""
911 paths = []
912 for root, dirs, files in os.walk(corebase + '/' + subdir):
913 for d in dirs:
914 configfilepath = os.path.join(root, d, configfile)
915 if os.path.exists(configfilepath):
916 paths.append(os.path.join(root, d))
917
918 if paths:
919 bb.event.fire(bb.event.CoreBaseFilesFound(paths), self.data)
920
921 def findConfigFilePath(self, configfile):
922 """
923 Find the location on disk of configfile and if it exists and was parsed by BitBake
924 emit the ConfigFilePathFound event with the path to the file.
925 """
926 path = bb.cookerdata.findConfigFile(configfile, self.data)
927 if not path:
928 return
929
930 # Generate a list of parsed configuration files by searching the files
931 # listed in the __depends and __base_depends variables with a .conf suffix.
932 conffiles = []
933 dep_files = self.data.getVar('__base_depends') or []
934 dep_files = dep_files + (self.data.getVar('__depends') or [])
935
936 for f in dep_files:
937 if f[0].endswith(".conf"):
938 conffiles.append(f[0])
939
940 _, conf, conffile = path.rpartition("conf/")
941 match = os.path.join(conf, conffile)
942 # Try and find matches for conf/conffilename.conf as we don't always
943 # have the full path to the file.
944 for cfg in conffiles:
945 if cfg.endswith(match):
946 bb.event.fire(bb.event.ConfigFilePathFound(path),
947 self.data)
948 break
949
950 def findFilesMatchingInDir(self, filepattern, directory):
951 """
952 Searches for files matching the regex 'pattern' which are children of
953 'directory' in each BBPATH. i.e. to find all rootfs package classes available
954 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
955 or to find all machine configuration files one could call:
956 findFilesMatchingInDir(self, 'conf/machines', 'conf')
957 """
958
959 matches = []
960 p = re.compile(re.escape(filepattern))
961 bbpaths = self.data.getVar('BBPATH', True).split(':')
962 for path in bbpaths:
963 dirpath = os.path.join(path, directory)
964 if os.path.exists(dirpath):
965 for root, dirs, files in os.walk(dirpath):
966 for f in files:
967 if p.search(f):
968 matches.append(f)
969
970 if matches:
971 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
972
973 def findConfigFiles(self, varname):
974 """
975 Find config files which are appropriate values for varname.
976 i.e. MACHINE, DISTRO
977 """
978 possible = []
979 var = varname.lower()
980
981 data = self.data
982 # iterate configs
983 bbpaths = data.getVar('BBPATH', True).split(':')
984 for path in bbpaths:
985 confpath = os.path.join(path, "conf", var)
986 if os.path.exists(confpath):
987 for root, dirs, files in os.walk(confpath):
988 # get all child files, these are appropriate values
989 for f in files:
990 val, sep, end = f.rpartition('.')
991 if end == 'conf':
992 possible.append(val)
993
994 if possible:
995 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
996
997 def findInheritsClass(self, klass):
998 """
999 Find all recipes which inherit the specified class
1000 """
1001 pkg_list = []
1002
1003 for pfn in self.recipecache.pkg_fn:
1004 inherits = self.recipecache.inherits.get(pfn, None)
1005 if inherits and inherits.count(klass) > 0:
1006 pkg_list.append(self.recipecache.pkg_fn[pfn])
1007
1008 return pkg_list
1009
1010 def generateTargetsTree(self, klass=None, pkgs=[]):
1011 """
1012 Generate a dependency tree of buildable targets
1013 Generate an event with the result
1014 """
1015 # if the caller hasn't specified a pkgs list default to universe
1016 if not len(pkgs):
1017 pkgs = ['universe']
1018 # if inherited_class passed ensure all recipes which inherit the
1019 # specified class are included in pkgs
1020 if klass:
1021 extra_pkgs = self.findInheritsClass(klass)
1022 pkgs = pkgs + extra_pkgs
1023
1024 # generate a dependency tree for all our packages
1025 tree = self.generatePkgDepTreeData(pkgs, 'build')
1026 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1027
1028 def buildWorldTargetList(self):
1029 """
1030 Build package list for "bitbake world"
1031 """
1032 parselog.debug(1, "collating packages for \"world\"")
1033 for f in self.recipecache.possible_world:
1034 terminal = True
1035 pn = self.recipecache.pkg_fn[f]
1036
1037 for p in self.recipecache.pn_provides[pn]:
1038 if p.startswith('virtual/'):
1039 parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
1040 terminal = False
1041 break
1042 for pf in self.recipecache.providers[p]:
1043 if self.recipecache.pkg_fn[pf] != pn:
1044 parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
1045 terminal = False
1046 break
1047 if terminal:
1048 self.recipecache.world_target.add(pn)
1049
1050 def interactiveMode( self ):
1051 """Drop off into a shell"""
1052 try:
1053 from bb import shell
1054 except ImportError:
1055 parselog.exception("Interactive mode not available")
1056 sys.exit(1)
1057 else:
1058 shell.start( self )
1059
1060
1061 def handleCollections( self, collections ):
1062 """Handle collections"""
1063 errors = False
1064 self.recipecache.bbfile_config_priorities = []
1065 if collections:
1066 collection_priorities = {}
1067 collection_depends = {}
1068 collection_list = collections.split()
1069 min_prio = 0
1070 for c in collection_list:
1071 # Get collection priority if defined explicitly
1072 priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
1073 if priority:
1074 try:
1075 prio = int(priority)
1076 except ValueError:
1077 parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
1078 errors = True
1079 if min_prio == 0 or prio < min_prio:
1080 min_prio = prio
1081 collection_priorities[c] = prio
1082 else:
1083 collection_priorities[c] = None
1084
1085 # Check dependencies and store information for priority calculation
1086 deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
1087 if deps:
1088 depnamelist = []
1089 deplist = deps.split()
1090 for dep in deplist:
1091 depsplit = dep.split(':')
1092 if len(depsplit) > 1:
1093 try:
1094 depver = int(depsplit[1])
1095 except ValueError:
1096 parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
1097 errors = True
1098 continue
1099 else:
1100 depver = None
1101 dep = depsplit[0]
1102 depnamelist.append(dep)
1103
1104 if dep in collection_list:
1105 if depver:
1106 layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
1107 if layerver:
1108 try:
1109 lver = int(layerver)
1110 except ValueError:
1111 parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
1112 errors = True
1113 continue
1114 if lver != depver:
1115 parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
1116 errors = True
1117 else:
1118 parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
1119 errors = True
1120 else:
1121 parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
1122 errors = True
1123 collection_depends[c] = depnamelist
1124 else:
1125 collection_depends[c] = []
1126
1127 # Recursively work out collection priorities based on dependencies
1128 def calc_layer_priority(collection):
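                     # A layer without an explicit BBFILE_PRIORITY gets one more
                     # than the highest priority among its dependencies (starting
                     # from the lowest explicitly configured priority).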
1129 if not collection_priorities[collection]:
1130 max_depprio = min_prio
1131 for dep in collection_depends[collection]:
1132 calc_layer_priority(dep)
1133 depprio = collection_priorities[dep]
1134 if depprio > max_depprio:
1135 max_depprio = depprio
1136 max_depprio += 1
1137 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
1138 collection_priorities[collection] = max_depprio
1139
1140 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
1141 for c in collection_list:
1142 calc_layer_priority(c)
1143 regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
1144 if regex == None:
1145 parselog.error("BBFILE_PATTERN_%s not defined" % c)
1146 errors = True
1147 continue
1148 try:
1149 cre = re.compile(regex)
1150 except re.error:
1151 parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
1152 errors = True
1153 continue
1154 self.recipecache.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
1155 if errors:
1156 # We've already printed the actual error(s)
1157 raise CollectionError("Errors during parsing layer configuration")
1158
1159 def buildSetVars(self):
1160 """
1161 Setup any variables needed before starting a build
1162 """
1163 if not self.data.getVar("BUILDNAME"):
1164 self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
1165 self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
1166
1167 def matchFiles(self, bf):
1168 """
1169 Find the .bb files which match the expression in 'buildfile'.
1170 """
1171 if bf.startswith("/") or bf.startswith("../"):
1172 bf = os.path.abspath(bf)
1173
1174 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1175 filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
1176 try:
1177 os.stat(bf)
1178 bf = os.path.abspath(bf)
1179 return [bf]
1180 except OSError:
1181 regexp = re.compile(bf)
1182 matches = []
1183 for f in filelist:
1184 if regexp.search(f) and os.path.isfile(f):
1185 matches.append(f)
1186 return matches
1187
1188 def matchFile(self, buildfile):
1189 """
1190 Find the .bb file which matches the expression in 'buildfile'.
1191        Raise an error if multiple files match.
1192 """
1193 matches = self.matchFiles(buildfile)
1194 if len(matches) != 1:
1195 if matches:
1196 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1197 if matches:
1198 for f in matches:
1199 msg += "\n %s" % f
1200 parselog.error(msg)
1201 else:
1202 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1203 raise NoSpecificMatch
1204 return matches[0]
1205
1206 def buildFile(self, buildfile, task):
1207 """
1208 Build the file matching regexp buildfile
1209 """
1210
1211 # Too many people use -b because they think it's how you normally
1212 # specify a target to be built, so show a warning
1213 bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")
1214
1215 # Parse the configuration here. We need to do it explicitly here since
1216 # buildFile() doesn't use the cache
1217 self.parseConfiguration()
1218
1219 # If we are told to do the None task then query the default task
1220 if (task == None):
1221 task = self.configuration.cmd
1222
1223 fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
1224 fn = self.matchFile(fn)
1225
1226 self.buildSetVars()
1227
1228 infos = bb.cache.Cache.parse(fn, self.collection.get_file_appends(fn), \
1229 self.data,
1230 self.caches_array)
1231 infos = dict(infos)
1232
1233 fn = bb.cache.Cache.realfn2virtual(fn, cls)
1234 try:
1235 info_array = infos[fn]
1236 except KeyError:
1237 bb.fatal("%s does not exist" % fn)
1238
1239 if info_array[0].skipped:
1240 bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))
1241
1242 self.recipecache.add_from_recipeinfo(fn, info_array)
1243
1244 # Tweak some variables
1245 item = info_array[0].pn
1246 self.recipecache.ignored_dependencies = set()
1247 self.recipecache.bbfile_priority[fn] = 1
1248
1249 # Remove external dependencies
1250 self.recipecache.task_deps[fn]['depends'] = {}
1251 self.recipecache.deps[fn] = []
1252 self.recipecache.rundeps[fn] = []
1253 self.recipecache.runrecs[fn] = []
1254
1255 # Invalidate task for target if force mode active
1256 if self.configuration.force:
1257 logger.verbose("Invalidate task %s, %s", task, fn)
1258 bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)
1259
1260 # Setup taskdata structure
1261 taskdata = bb.taskdata.TaskData(self.configuration.abort)
1262 taskdata.add_provider(self.data, self.recipecache, item)
1263
1264 buildname = self.data.getVar("BUILDNAME")
1265 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)
1266
1267 # Execute the runqueue
1268 runlist = [[item, "do_%s" % task]]
1269
1270 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
1271
1272 def buildFileIdle(server, rq, abort):
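                 # Idle callback driving the runqueue: returns False (after firing
                 # BuildCompleted) once the build has finished or been stopped,
                 # otherwise it passes back the runqueue's return value.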
1273
1274 msg = None
1275 if abort or self.state == state.forceshutdown:
1276 rq.finish_runqueue(True)
1277 msg = "Forced shutdown"
1278 elif self.state == state.shutdown:
1279 rq.finish_runqueue(False)
1280 msg = "Stopped build"
1281 failures = 0
1282 try:
1283 retval = rq.execute_runqueue()
1284 except runqueue.TaskFailure as exc:
1285 failures += len(exc.args)
1286 retval = False
1287 except SystemExit as exc:
1288 self.command.finishAsyncCommand()
1289 return False
1290
1291 if not retval:
1292 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
1293 self.command.finishAsyncCommand(msg)
1294 return False
1295 if retval is True:
1296 return True
1297 return retval
1298
1299 self.configuration.server_register_idlecallback(buildFileIdle, rq)
1300
1301 def buildTargets(self, targets, task):
1302 """
1303 Attempt to build the targets specified
1304 """
1305
1306 def buildTargetsIdle(server, rq, abort):
1307 msg = None
1308 if abort or self.state == state.forceshutdown:
1309 rq.finish_runqueue(True)
1310 msg = "Forced shutdown"
1311 elif self.state == state.shutdown:
1312 rq.finish_runqueue(False)
1313 msg = "Stopped build"
1314 failures = 0
1315 try:
1316 retval = rq.execute_runqueue()
1317 except runqueue.TaskFailure as exc:
1318 failures += len(exc.args)
1319 retval = False
1320 except SystemExit as exc:
1321 self.command.finishAsyncCommand()
1322 return False
1323
1324 if not retval:
1325 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
1326 self.command.finishAsyncCommand(msg)
1327 return False
1328 if retval is True:
1329 return True
1330 return retval
1331
1332 self.buildSetVars()
1333
1334 taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
1335
1336 buildname = self.data.getVar("BUILDNAME")
1337 bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)
1338
1339 rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
1340 if 'universe' in targets:
1341 rq.rqdata.warn_multi_bb = True
1342
1343 self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1344
1345
1346 def getAllKeysWithFlags(self, flaglist):
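             # Dump every non-internal variable with its value, its history and
             # the requested flags (used e.g. for the event log header above).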
1347 dump = {}
1348 for k in self.data.keys():
1349 try:
1350 v = self.data.getVar(k, True)
1351 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1352 dump[k] = {
1353 'v' : v ,
1354 'history' : self.data.varhistory.variable(k),
1355 }
1356 for d in flaglist:
1357 dump[k][d] = self.data.getVarFlag(k, d)
1358 except Exception as e:
1359 print(e)
1360 return dump
1361
1362
1363 def generateNewImage(self, image, base_image, package_queue, timestamp, description):
1364 '''
1365 Create a new image with a "require"/"inherit" base_image statement
1366 '''
1367 if timestamp:
1368 image_name = os.path.splitext(image)[0]
1369 timestr = time.strftime("-%Y%m%d-%H%M%S")
1370 dest = image_name + str(timestr) + ".bb"
1371 else:
1372 if not image.endswith(".bb"):
1373 dest = image + ".bb"
1374 else:
1375 dest = image
1376
1377 basename = False
1378 if base_image:
1379 with open(base_image, 'r') as f:
1380 require_line = f.readline()
1381 p = re.compile("IMAGE_BASENAME *=")
1382 for line in f:
1383 if p.search(line):
1384 basename = True
1385
1386 with open(dest, "w") as imagefile:
1387 if base_image is None:
1388 imagefile.write("inherit core-image\n")
1389 else:
1390 topdir = self.data.getVar("TOPDIR")
1391 if topdir in base_image:
1392 base_image = require_line.split()[1]
1393 imagefile.write("require " + base_image + "\n")
1394 image_install = "IMAGE_INSTALL = \""
1395 for package in package_queue:
1396 image_install += str(package) + " "
1397 image_install += "\"\n"
1398 imagefile.write(image_install)
1399
1400 description_var = "DESCRIPTION = \"" + description + "\"\n"
1401 imagefile.write(description_var)
1402
1403 if basename:
1404            # If this is overwritten in an inherited image, reset it to the default
1405 image_basename = "IMAGE_BASENAME = \"${PN}\"\n"
1406 imagefile.write(image_basename)
1407
1408 self.state = state.initial
1409 if timestamp:
1410 return timestr
1411
1412 # This is called for all async commands when self.state != running
1413 def updateCache(self):
1414 if self.state == state.running:
1415 return
1416
1417 if self.state in (state.shutdown, state.forceshutdown, state.error):
1418 if hasattr(self.parser, 'shutdown'):
1419 self.parser.shutdown(clean=False, force = True)
1420 raise bb.BBHandledException()
1421
1422 if self.state != state.parsing:
1423
1424 # reload files for which we got notifications
1425 for p in self.inotify_modified_files:
1426 bb.parse.update_cache(p)
1427 self.inotify_modified_files = []
1428
1429 if not self.baseconfig_valid:
1430 logger.debug(1, "Reloading base configuration data")
1431 self.initConfigurationData()
1432 self.baseconfig_valid = True
1433 self.parsecache_valid = False
1434
1435 if self.state != state.parsing and not self.parsecache_valid:
1436 self.parseConfiguration ()
1437 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
1438 bb.event.fire(bb.event.SanityCheck(False), self.data)
1439
1440 ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
1441 self.recipecache.ignored_dependencies = set(ignore.split())
1442
1443 for dep in self.configuration.extra_assume_provided:
1444 self.recipecache.ignored_dependencies.add(dep)
1445
1446 self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
1447 (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)
1448
1449 self.data.renameVar("__depends", "__base_depends")
1450 self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
1451
1452 self.parser = CookerParser(self, filelist, masked)
1453 self.parsecache_valid = True
1454
1455 self.state = state.parsing
1456
1457 if not self.parser.parse_next():
1458 collectlog.debug(1, "parsing complete")
1459 if self.parser.error:
1460 raise bb.BBHandledException()
1461 self.show_appends_with_no_recipes()
1462 self.handlePrefProviders()
1463 self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
1464 self.state = state.running
1465 return None
1466
1467 return True
1468
1469 def checkPackages(self, pkgs_to_build):
1470
1471 # Return a copy, don't modify the original
1472 pkgs_to_build = pkgs_to_build[:]
1473
1474 if len(pkgs_to_build) == 0:
1475 raise NothingToBuild
1476
1477 ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
1478 for pkg in pkgs_to_build:
1479 if pkg in ignore:
1480 parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
1481
1482 if 'world' in pkgs_to_build:
1483 self.buildWorldTargetList()
1484 pkgs_to_build.remove('world')
1485 for t in self.recipecache.world_target:
1486 pkgs_to_build.append(t)
1487
1488 if 'universe' in pkgs_to_build:
1489 parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
1490 parselog.debug(1, "collating packages for \"universe\"")
1491 pkgs_to_build.remove('universe')
1492 for t in self.recipecache.universe_target:
1493 pkgs_to_build.append(t)
1494
1495 return pkgs_to_build
1496
1497
1498
1499
1500 def pre_serve(self):
1501 # Empty the environment. The environment will be populated as
1502 # necessary from the data store.
1503 #bb.utils.empty_environment()
1504 try:
1505 self.prhost = prserv.serv.auto_start(self.data)
1506 except prserv.serv.PRServiceConfigError:
1507 bb.event.fire(CookerExit(), self.event_data)
1508 self.state = state.error
1509 return
1510
1511 def post_serve(self):
1512 prserv.serv.auto_shutdown(self.data)
1513 bb.event.fire(CookerExit(), self.event_data)
1514
1515 def shutdown(self, force = False):
1516 if force:
1517 self.state = state.forceshutdown
1518 else:
1519 self.state = state.shutdown
1520
1521 def finishcommand(self):
1522 self.state = state.initial
1523
1524 def reset(self):
1525 self.initConfigurationData()
1526
1527def server_main(cooker, func, *args):
1528 cooker.pre_serve()
1529
1530 if cooker.configuration.profile:
1531 try:
1532 import cProfile as profile
1533 except:
1534 import profile
1535 prof = profile.Profile()
1536
1537 ret = profile.Profile.runcall(prof, func, *args)
1538
1539 prof.dump_stats("profile.log")
1540 bb.utils.process_profilelog("profile.log")
1541 print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
1542
1543 else:
1544 ret = func(*args)
1545
1546 cooker.post_serve()
1547
1548 return ret
1549
1550class CookerExit(bb.event.Event):
1551 """
1552 Notify clients of the Cooker shutdown
1553 """
1554
1555 def __init__(self):
1556 bb.event.Event.__init__(self)
1557
1558
1559class CookerCollectFiles(object):
1560 def __init__(self, priorities):
1561 self.appendlist = {}
1562 self.appliedappendlist = []
1563 self.bbfile_config_priorities = priorities
1564
1565 def calc_bbfile_priority( self, filename, matched = None ):
1566 for _, _, regex, pri in self.bbfile_config_priorities:
1567 if regex.match(filename):
1568 if matched != None:
1569 if not regex in matched:
1570 matched.add(regex)
1571 return pri
1572 return 0
1573
1574 def get_bbfiles(self):
1575 """Get list of default .bb files by reading out the current directory"""
1576 path = os.getcwd()
1577 contents = os.listdir(path)
1578 bbfiles = []
1579 for f in contents:
1580 if f.endswith(".bb"):
1581 bbfiles.append(os.path.abspath(os.path.join(path, f)))
1582 return bbfiles
1583
1584 def find_bbfiles(self, path):
1585 """Find all the .bb and .bbappend files in a directory"""
1586 found = []
1587 for dir, dirs, files in os.walk(path):
1588 for ignored in ('SCCS', 'CVS', '.svn'):
1589 if ignored in dirs:
1590 dirs.remove(ignored)
1591            found += [os.path.join(dir, f) for f in files if f.endswith(('.bb', '.bbappend'))]
1592
1593 return found
1594
1595 def collect_bbfiles(self, config, eventdata):
1596 """Collect all available .bb build files"""
1597 masked = 0
1598
1599 collectlog.debug(1, "collecting .bb files")
1600
1601 files = (config.getVar( "BBFILES", True) or "").split()
1602 config.setVar("BBFILES", " ".join(files))
1603
1604 # Sort files by priority
1605 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )
1606
1607 if not len(files):
1608 files = self.get_bbfiles()
1609
1610 if not len(files):
1611 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1612 bb.event.fire(CookerExit(), eventdata)
1613
1614 # Can't use set here as order is important
1615 newfiles = []
1616 for f in files:
1617 if os.path.isdir(f):
1618 dirfiles = self.find_bbfiles(f)
1619 for g in dirfiles:
1620 if g not in newfiles:
1621 newfiles.append(g)
1622 else:
1623 globbed = glob.glob(f)
1624 if not globbed and os.path.exists(f):
1625 globbed = [f]
1626 for g in globbed:
1627 if g not in newfiles:
1628 newfiles.append(g)
1629
1630 bbmask = config.getVar('BBMASK', True)
1631
1632 if bbmask:
1633 try:
1634 bbmask_compiled = re.compile(bbmask)
1635 except sre_constants.error:
1636 collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
1637 return list(newfiles), 0
1638
1639 bbfiles = []
1640 bbappend = []
1641 for f in newfiles:
1642 if bbmask and bbmask_compiled.search(f):
1643 collectlog.debug(1, "skipping masked file %s", f)
1644 masked += 1
1645 continue
1646 if f.endswith('.bb'):
1647 bbfiles.append(f)
1648 elif f.endswith('.bbappend'):
1649 bbappend.append(f)
1650 else:
1651 collectlog.debug(1, "skipping %s: unknown file extension", f)
1652
1653 # Build a list of .bbappend files for each .bb file
1654 for f in bbappend:
1655 base = os.path.basename(f).replace('.bbappend', '.bb')
1656 if not base in self.appendlist:
1657 self.appendlist[base] = []
1658 if f not in self.appendlist[base]:
1659 self.appendlist[base].append(f)
1660
1661 # Find overlayed recipes
1662 # bbfiles will be in priority order which makes this easy
1663 bbfile_seen = dict()
1664 self.overlayed = defaultdict(list)
1665 for f in reversed(bbfiles):
1666 base = os.path.basename(f)
1667 if base not in bbfile_seen:
1668 bbfile_seen[base] = f
1669 else:
1670 topfile = bbfile_seen[base]
1671 self.overlayed[topfile].append(f)
1672
1673 return (bbfiles, masked)
1674
1675 def get_file_appends(self, fn):
1676 """
1677 Returns a list of .bbappend files to apply to fn
1678 """
1679 filelist = []
1680 f = os.path.basename(fn)
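             # A .bbappend either matches the recipe file name exactly or uses '%'
             # as a wildcard, in which case only the part before the '%' has to match.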
1681 for bbappend in self.appendlist:
1682 if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
1683 self.appliedappendlist.append(bbappend)
1684 for filename in self.appendlist[bbappend]:
1685 filelist.append(filename)
1686 return filelist
1687
1688 def collection_priorities(self, pkgfns):
1689
1690 priorities = {}
1691
1692 # Calculate priorities for each file
1693 matched = set()
1694 for p in pkgfns:
1695 realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
1696 priorities[p] = self.calc_bbfile_priority(realfn, matched)
1697
1698 # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
1699 unmatched = set()
1700 for _, _, regex, pri in self.bbfile_config_priorities:
1701 if not regex in matched:
1702 unmatched.add(regex)
1703
1704 def findmatch(regex):
1705 for bbfile in self.appendlist:
1706 for append in self.appendlist[bbfile]:
1707 if regex.match(append):
1708 return True
1709 return False
1710
1711 for unmatch in unmatched.copy():
1712 if findmatch(unmatch):
1713 unmatched.remove(unmatch)
1714
1715 for collection, pattern, regex, _ in self.bbfile_config_priorities:
1716 if regex in unmatched:
1717 collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
1718
1719 return priorities
1720
1721class ParsingFailure(Exception):
1722 def __init__(self, realexception, recipe):
1723 self.realexception = realexception
1724 self.recipe = recipe
1725 Exception.__init__(self, realexception, recipe)
1726
1727class Feeder(multiprocessing.Process):
1728 def __init__(self, jobs, to_parsers, quit):
1729 self.quit = quit
1730 self.jobs = jobs
1731 self.to_parsers = to_parsers
1732 multiprocessing.Process.__init__(self)
1733
1734 def run(self):
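             # Feed queued recipe jobs to the parser processes: exit on a 'cancel'
             # quit request or when no jobs remain; if the parser queue is full,
             # put the job back and try again on the next iteration.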
1735 while True:
1736 try:
1737 quit = self.quit.get_nowait()
1738 except Queue.Empty:
1739 pass
1740 else:
1741 if quit == 'cancel':
1742 self.to_parsers.cancel_join_thread()
1743 break
1744
1745 try:
1746 job = self.jobs.pop()
1747 except IndexError:
1748 break
1749
1750 try:
1751 self.to_parsers.put(job, timeout=0.5)
1752 except Queue.Full:
1753 self.jobs.insert(0, job)
1754 continue
1755
1756class Parser(multiprocessing.Process):
1757 def __init__(self, jobs, results, quit, init, profile):
1758 self.jobs = jobs
1759 self.results = results
1760 self.quit = quit
1761 self.init = init
1762 multiprocessing.Process.__init__(self)
1763 self.context = bb.utils.get_context().copy()
1764 self.handlers = bb.event.get_class_handlers().copy()
1765 self.profile = profile
1766
1767 def run(self):
1768
1769 if not self.profile:
1770 self.realrun()
1771 return
1772
1773 try:
1774 import cProfile as profile
1775 except:
1776 import profile
1777 prof = profile.Profile()
1778 try:
1779 profile.Profile.runcall(prof, self.realrun)
1780 finally:
1781 logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
1782 prof.dump_stats(logfile)
1783 bb.utils.process_profilelog(logfile)
1784 print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))
1785
1786 def realrun(self):
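             # Worker loop: pull filenames from the jobs queue and push parse
             # results to the results queue. A None job means there is no more
             # work; results that cannot be queued immediately are kept in
             # 'pending' and retried.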
1787 if self.init:
1788 self.init()
1789
1790 pending = []
1791 while True:
1792 try:
1793 self.quit.get_nowait()
1794 except Queue.Empty:
1795 pass
1796 else:
1797 self.results.cancel_join_thread()
1798 break
1799
1800 if pending:
1801 result = pending.pop()
1802 else:
1803 try:
1804 job = self.jobs.get(timeout=0.25)
1805 except Queue.Empty:
1806 continue
1807
1808 if job is None:
1809 break
1810 result = self.parse(*job)
1811
1812 try:
1813 self.results.put(result, timeout=0.25)
1814 except Queue.Full:
1815 pending.append(result)
1816
1817 def parse(self, filename, appends, caches_array):
1818 try:
1819 # Reset our environment and handlers to the original settings
1820 bb.utils.set_context(self.context.copy())
1821 bb.event.set_class_handlers(self.handlers.copy())
1822 return True, bb.cache.Cache.parse(filename, appends, self.cfg, caches_array)
1823 except Exception as exc:
1824 tb = sys.exc_info()[2]
1825 exc.recipe = filename
1826 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
1827 return True, exc
1828        # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
1829        # and a worker thread doesn't just exit on its own in response to,
1830        # for example, a SystemExit event.
1831 except BaseException as exc:
1832 return True, ParsingFailure(exc, filename)
1833
1834class CookerParser(object):
1835 def __init__(self, cooker, filelist, masked):
1836 self.filelist = filelist
1837 self.cooker = cooker
1838 self.cfgdata = cooker.data
1839 self.cfghash = cooker.data_hash
1840
1841 # Accounting statistics
1842 self.parsed = 0
1843 self.cached = 0
1844 self.error = 0
1845 self.masked = masked
1846
1847 self.skipped = 0
1848 self.virtuals = 0
1849 self.total = len(filelist)
1850
1851 self.current = 0
1852 self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
1853 multiprocessing.cpu_count())
1854
1855 self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
1856 self.fromcache = []
1857 self.willparse = []
1858 for filename in self.filelist:
1859 appends = self.cooker.collection.get_file_appends(filename)
1860 if not self.bb_cache.cacheValid(filename, appends):
1861 self.willparse.append((filename, appends, cooker.caches_array))
1862 else:
1863 self.fromcache.append((filename, appends))
1864 self.toparse = self.total - len(self.fromcache)
1865 self.progress_chunk = max(self.toparse / 100, 1)
1866
1867 self.start()
1868 self.haveshutdown = False
1869
1870 def start(self):
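             # Results start with the recipes that can be loaded from the cache;
             # if anything actually needs parsing, a Feeder process and a pool of
             # Parser processes are started and their output is chained after the
             # cached results.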
1871 self.results = self.load_cached()
1872 self.processes = []
1873 if self.toparse:
1874 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
1875 def init():
1876 Parser.cfg = self.cfgdata
1877 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
1878 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)
1879
1880 self.feeder_quit = multiprocessing.Queue(maxsize=1)
1881 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
1882 self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
1883 self.result_queue = multiprocessing.Queue()
1884 self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
1885 self.feeder.start()
1886 for i in range(0, self.num_processes):
1887 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
1888 parser.start()
1889 self.processes.append(parser)
1890
1891 self.results = itertools.chain(self.results, self.parse_generator())
1892
1893 def shutdown(self, clean=True, force=False):
1894 if not self.toparse:
1895 return
1896 if self.haveshutdown:
1897 return
1898 self.haveshutdown = True
1899
1900 if clean:
1901 event = bb.event.ParseCompleted(self.cached, self.parsed,
1902 self.skipped, self.masked,
1903 self.virtuals, self.error,
1904 self.total)
1905
1906 bb.event.fire(event, self.cfgdata)
1907 self.feeder_quit.put(None)
1908 for process in self.processes:
1909 self.jobs.put(None)
1910 else:
1911 self.feeder_quit.put('cancel')
1912
1913 self.parser_quit.cancel_join_thread()
1914 for process in self.processes:
1915 self.parser_quit.put(None)
1916
1917 self.jobs.cancel_join_thread()
1918
1919 for process in self.processes:
1920 if force:
1921 process.join(.1)
1922 process.terminate()
1923 else:
1924 process.join()
1925 self.feeder.join()
1926
1927 sync = threading.Thread(target=self.bb_cache.sync)
1928 sync.start()
1929 multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
1930 bb.codeparser.parser_cache_savemerge(self.cooker.data)
1931 bb.fetch.fetcher_parse_done(self.cooker.data)
1932
1933 def load_cached(self):
1934 for filename, appends in self.fromcache:
1935 cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
1936 yield not cached, infos
1937
1938 def parse_generator(self):
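             # Yield results from the worker processes until the expected number
             # of recipes has been parsed; exceptions sent back by a worker are
             # re-raised here so parse_next() can report them.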
1939 while True:
1940 if self.parsed >= self.toparse:
1941 break
1942
1943 try:
1944 result = self.result_queue.get(timeout=0.25)
1945 except Queue.Empty:
1946 pass
1947 else:
1948 value = result[1]
1949 if isinstance(value, BaseException):
1950 raise value
1951 else:
1952 yield result
1953
1954 def parse_next(self):
1955 result = []
1956 parsed = None
1957 try:
1958 parsed, result = self.results.next()
1959 except StopIteration:
1960 self.shutdown()
1961 return False
1962 except bb.BBHandledException as exc:
1963 self.error += 1
1964 logger.error('Failed to parse recipe: %s' % exc.recipe)
1965 self.shutdown(clean=False)
1966 return False
1967 except ParsingFailure as exc:
1968 self.error += 1
1969 logger.error('Unable to parse %s: %s' %
1970 (exc.recipe, bb.exceptions.to_string(exc.realexception)))
1971 self.shutdown(clean=False)
1972 return False
1973 except bb.parse.ParseError as exc:
1974 self.error += 1
1975 logger.error(str(exc))
1976 self.shutdown(clean=False)
1977 return False
1978 except bb.data_smart.ExpansionError as exc:
1979 self.error += 1
1980 _, value, _ = sys.exc_info()
1981 logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
1982 self.shutdown(clean=False)
1983 return False
1984 except SyntaxError as exc:
1985 self.error += 1
1986 logger.error('Unable to parse %s', exc.recipe)
1987 self.shutdown(clean=False)
1988 return False
1989 except Exception as exc:
1990 self.error += 1
1991 etype, value, tb = sys.exc_info()
1992 if hasattr(value, "recipe"):
1993 logger.error('Unable to parse %s', value.recipe,
1994 exc_info=(etype, value, exc.traceback))
1995 else:
1996 # Most likely, an exception occurred during raising an exception
1997 import traceback
1998 logger.error('Exception during parse: %s' % traceback.format_exc())
1999 self.shutdown(clean=False)
2000 return False
2001
2002 self.current += 1
2003 self.virtuals += len(result)
2004 if parsed:
2005 self.parsed += 1
2006 if self.parsed % self.progress_chunk == 0:
2007 bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
2008 self.cfgdata)
2009 else:
2010 self.cached += 1
2011
2012 for virtualfn, info_array in result:
2013 if info_array[0].skipped:
2014 self.skipped += 1
2015 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
2016 self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
2017 parsed=parsed, watcher = self.cooker.add_filewatch)
2018 return True
2019
2020 def reparse(self, filename):
2021 infos = self.bb_cache.parse(filename,
2022 self.cooker.collection.get_file_appends(filename),
2023 self.cfgdata, self.cooker.caches_array)
2024 for vfn, info_array in infos:
2025 self.cooker.recipecache.add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
new file mode 100644
index 0000000000..2ceed2d867
--- /dev/null
+++ b/bitbake/lib/bb/cookerdata.py
@@ -0,0 +1,320 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import os, sys
26from functools import wraps
27import logging
28import bb
29from bb import data
30import bb.parse
31
32logger = logging.getLogger("BitBake")
33parselog = logging.getLogger("BitBake.Parsing")
34
35class ConfigParameters(object):
36 def __init__(self):
37 self.options, targets = self.parseCommandLine()
38 self.environment = self.parseEnvironment()
39
40 self.options.pkgs_to_build = targets or []
41
42 self.options.tracking = False
43 if hasattr(self.options, "show_environment") and self.options.show_environment:
44 self.options.tracking = True
45
46 for key, val in self.options.__dict__.items():
47 setattr(self, key, val)
48
49 def parseCommandLine(self):
50 raise Exception("Caller must implement commandline option parsing")
51
52 def parseEnvironment(self):
53 return os.environ.copy()
54
55 def updateFromServer(self, server):
56 if not self.options.cmd:
57 defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"])
58 if error:
59 raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error)
60 self.options.cmd = defaulttask or "build"
61 _, error = server.runCommand(["setConfig", "cmd", self.options.cmd])
62 if error:
63 raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
64
65 if not self.options.pkgs_to_build:
66 bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
67 if error:
68 raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
69 if bbpkgs:
70 self.options.pkgs_to_build.extend(bbpkgs.split())
71
72 def updateToServer(self, server):
73 options = {}
74 for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
75 "verbose", "debug", "dry_run", "dump_signatures",
76 "debug_domains", "extra_assume_provided", "profile"]:
77 options[o] = getattr(self.options, o)
78
79 ret, error = server.runCommand(["updateConfig", options])
80 if error:
81 raise Exception("Unable to update the server configuration with local parameters: %s" % error)
82
83 def parseActions(self):
84 # Parse any commandline into actions
85 action = {'action':None, 'msg':None}
86 if self.options.show_environment:
87 if 'world' in self.options.pkgs_to_build:
88 action['msg'] = "'world' is not a valid target for --environment."
89 elif 'universe' in self.options.pkgs_to_build:
90 action['msg'] = "'universe' is not a valid target for --environment."
91 elif len(self.options.pkgs_to_build) > 1:
92 action['msg'] = "Only one target can be used with the --environment option."
93 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
94 action['msg'] = "No target should be used with the --environment and --buildfile options."
95 elif len(self.options.pkgs_to_build) > 0:
96 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
97 else:
98 action['action'] = ["showEnvironment", self.options.buildfile]
99 elif self.options.buildfile is not None:
100 action['action'] = ["buildFile", self.options.buildfile, self.options.cmd]
101 elif self.options.revisions_changed:
102 action['action'] = ["compareRevisions"]
103 elif self.options.show_versions:
104 action['action'] = ["showVersions"]
105 elif self.options.parse_only:
106 action['action'] = ["parseFiles"]
107 elif self.options.dot_graph:
108 if self.options.pkgs_to_build:
109 action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd]
110 else:
111 action['msg'] = "Please specify a package name for dependency graph generation."
112 else:
113 if self.options.pkgs_to_build:
114 action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd]
115 else:
116 #action['msg'] = "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information."
117 action = None
118 self.options.initialaction = action
119 return action
120
121class CookerConfiguration(object):
122 """
123 Manages build options and configurations for one run
124 """
125
126 def __init__(self):
127 self.debug_domains = []
128 self.extra_assume_provided = []
129 self.prefile = []
130 self.postfile = []
131 self.debug = 0
132 self.cmd = None
133 self.abort = True
134 self.force = False
135 self.profile = False
136 self.nosetscene = False
137 self.invalidate_stamp = False
138 self.dump_signatures = []
139 self.dry_run = False
140 self.tracking = False
141 self.interface = []
142 self.writeeventlog = False
143
144 self.env = {}
145
146 def setConfigParameters(self, parameters):
147 for key in self.__dict__.keys():
148 if key in parameters.options.__dict__:
149 setattr(self, key, parameters.options.__dict__[key])
150 self.env = parameters.environment.copy()
151 self.tracking = parameters.tracking
152
153 def setServerRegIdleCallback(self, srcb):
154 self.server_register_idlecallback = srcb
155
156 def __getstate__(self):
157 state = {}
158 for key in self.__dict__.keys():
159 if key == "server_register_idlecallback":
160 state[key] = None
161 else:
162 state[key] = getattr(self, key)
163 return state
164
165    def __setstate__(self, state):
166 for k in state:
167 setattr(self, k, state[k])
168
169
170def catch_parse_error(func):
171 """Exception handling bits for our parsing"""
172 @wraps(func)
173 def wrapped(fn, *args):
174 try:
175 return func(fn, *args)
176 except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
177 import traceback
178 parselog.critical( traceback.format_exc())
179 parselog.critical("Unable to parse %s: %s" % (fn, exc))
180 sys.exit(1)
181 return wrapped
182
183@catch_parse_error
184def parse_config_file(fn, data, include=True):
185 return bb.parse.handle(fn, data, include)
186
187@catch_parse_error
188def _inherit(bbclass, data):
189 bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
190 return data
191
192def findConfigFile(configfile, data):
193 search = []
194 bbpath = data.getVar("BBPATH", True)
195 if bbpath:
196 for i in bbpath.split(":"):
197 search.append(os.path.join(i, "conf", configfile))
198 path = os.getcwd()
199 while path != "/":
200 search.append(os.path.join(path, "conf", configfile))
201 path, _ = os.path.split(path)
202
203 for i in search:
204 if os.path.exists(i):
205 return i
206
207 return None
208
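# Illustrative sketch, not part of the patch above: findConfigFile() looks for
# conf/<configfile> under every BBPATH entry and then under each parent of the
# current working directory, returning the first existing path or None. The
# BBPATH value here is made up.
from bb import data
from bb.cookerdata import findConfigFile

d = data.init()
d.setVar("BBPATH", "/home/user/build:/home/user/poky/meta")
print(findConfigFile("site.conf", d))   # first existing conf/site.conf, or None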
209class CookerDataBuilder(object):
210
211 def __init__(self, cookercfg, worker = False):
212
213 self.prefiles = cookercfg.prefile
214 self.postfiles = cookercfg.postfile
215 self.tracking = cookercfg.tracking
216
217 bb.utils.set_context(bb.utils.clean_context())
218 bb.event.set_class_handlers(bb.event.clean_class_handlers())
219 self.data = bb.data.init()
220 if self.tracking:
221 self.data.enableTracking()
222
223 # Keep a datastore of the initial environment variables and their
224 # values from when BitBake was launched to enable child processes
225 # to use environment variables which have been cleaned from the
226        # BitBake process's environment
227 self.savedenv = bb.data.init()
228 for k in cookercfg.env:
229 self.savedenv.setVar(k, cookercfg.env[k])
230
231 filtered_keys = bb.utils.approved_variables()
232 bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
233 self.data.setVar("BB_ORIGENV", self.savedenv)
234
235 if worker:
236 self.data.setVar("BB_WORKERCONTEXT", "1")
237
238 def parseBaseConfiguration(self):
239 try:
240 self.parseConfigurationFiles(self.prefiles, self.postfiles)
241 except SyntaxError:
242 raise bb.BBHandledException
243 except bb.data_smart.ExpansionError as e:
244 logger.error(str(e))
245 raise bb.BBHandledException
246 except Exception:
247 logger.exception("Error parsing configuration files")
248 raise bb.BBHandledException
249
250 def _findLayerConf(self, data):
251 return findConfigFile("bblayers.conf", data)
252
253 def parseConfigurationFiles(self, prefiles, postfiles):
254 data = self.data
255 bb.parse.init_parser(data)
256
257 # Parse files for loading *before* bitbake.conf and any includes
258 for f in prefiles:
259 data = parse_config_file(f, data)
260
261 layerconf = self._findLayerConf(data)
262 if layerconf:
263 parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
264 # By definition bblayers.conf is in conf/ of TOPDIR.
265 # We may have been called with cwd somewhere else so reset TOPDIR
266 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
267 data = parse_config_file(layerconf, data)
268
269 layers = (data.getVar('BBLAYERS', True) or "").split()
270
271 data = bb.data.createCopy(data)
272 for layer in layers:
273 parselog.debug(2, "Adding layer %s", layer)
274 data.setVar('LAYERDIR', layer)
275 data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
276 data.expandVarref('LAYERDIR')
277
278 data.delVar('LAYERDIR')
279
280 if not data.getVar("BBPATH", True):
281 msg = "The BBPATH variable is not set"
282 if not layerconf:
283 msg += (" and bitbake did not find a conf/bblayers.conf file in"
284 " the expected location.\nMaybe you accidentally"
285 " invoked bitbake from the wrong directory?")
286 raise SystemExit(msg)
287
288 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
289
290 # Parse files for loading *after* bitbake.conf and any includes
291 for p in postfiles:
292 data = parse_config_file(p, data)
293
294 # Handle any INHERITs and inherit the base class
295 bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
296 for bbclass in bbclasses:
297 data = _inherit(bbclass, data)
298
299        # Normally we only register event handlers at the end of parsing .bb files
300 # We register any handlers we've found so far here...
301 for var in data.getVar('__BBHANDLERS') or []:
302 bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())
303
304 if data.getVar("BB_WORKERCONTEXT", False) is None:
305 bb.fetch.fetcher_init(data)
306 bb.codeparser.parser_cache_init(data)
307 bb.event.fire(bb.event.ConfigParsed(), data)
308
309 if data.getVar("BB_INVALIDCONF") is True:
310 data.setVar("BB_INVALIDCONF", False)
311 self.parseConfigurationFiles(self.prefiles, self.postfiles)
312 return
313
314 bb.parse.init_parser(data)
315 data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
316 self.data = data
317 self.data_hash = data.get_hash()
318
319
320
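# Illustrative sketch, not part of the patch above: CookerDataBuilder parses
# any prefiles, then conf/bblayers.conf (fixing TOPDIR), each layer's
# conf/layer.conf, conf/bitbake.conf, any postfiles and finally the classes
# named in INHERIT. This sketch assumes it is run from an existing build
# directory containing conf/bblayers.conf.
import os
from bb.cookerdata import CookerConfiguration, CookerDataBuilder

config = CookerConfiguration()
config.env = dict(os.environ)       # environment snapshot handed to the builder

builder = CookerDataBuilder(config)
builder.parseBaseConfiguration()    # raises bb.BBHandledException on parse errors

d = builder.data
print(d.getVar("BBPATH", True))     # assembled from bblayers.conf/layer.conf
print(d.getVar("BBFILES", True))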
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
new file mode 100644
index 0000000000..346a618582
--- /dev/null
+++ b/bitbake/lib/bb/daemonize.py
@@ -0,0 +1,193 @@
1"""
2Python Daemonizing helper
3
4Configurable daemon behaviors:
5
6   1.) The current working directory is set to the "/" directory.
7   2.) The current file creation mode mask is set to 0.
8 3.) Close all open files (1024).
9 4.) Redirect standard I/O streams to "/dev/null".
10
11A failed call to fork() now raises an exception.
12
13References:
14 1) Advanced Programming in the Unix Environment: W. Richard Stevens
15 http://www.apuebook.com/apue3e.html
16 2) The Linux Programming Interface: Michael Kerrisk
17 http://man7.org/tlpi/index.html
18 3) Unix Programming Frequently Asked Questions:
19 http://www.faqs.org/faqs/unix-faq/programmer/faq/
20
21Modified to allow a function to be daemonized and return for
22bitbake use by Richard Purdie
23"""
24
25__author__ = "Chad J. Schroeder"
26__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
27__version__ = "0.2"
28
29# Standard Python modules.
30import os # Miscellaneous OS interfaces.
31import sys # System-specific parameters and functions.
32
33# Default daemon parameters.
34# File mode creation mask of the daemon.
35# For BitBake's children, we do want to inherit the parent umask.
36UMASK = None
37
38# Default maximum for the number of available file descriptors.
39MAXFD = 1024
40
41# The standard I/O file descriptors are redirected to /dev/null by default.
42if (hasattr(os, "devnull")):
43 REDIRECT_TO = os.devnull
44else:
45 REDIRECT_TO = "/dev/null"
46
47def createDaemon(function, logfile):
48 """
49 Detach a process from the controlling terminal and run it in the
50 background as a daemon, returning control to the caller.
51 """
52
53 try:
54 # Fork a child process so the parent can exit. This returns control to
55 # the command-line or shell. It also guarantees that the child will not
56 # be a process group leader, since the child receives a new process ID
57 # and inherits the parent's process group ID. This step is required
58        # to ensure that the next call to os.setsid is successful.
59 pid = os.fork()
60 except OSError as e:
61 raise Exception("%s [%d]" % (e.strerror, e.errno))
62
63 if (pid == 0): # The first child.
64 # To become the session leader of this new session and the process group
65 # leader of the new process group, we call os.setsid(). The process is
66 # also guaranteed not to have a controlling terminal.
67 os.setsid()
68
69 # Is ignoring SIGHUP necessary?
70 #
71 # It's often suggested that the SIGHUP signal should be ignored before
72 # the second fork to avoid premature termination of the process. The
73 # reason is that when the first child terminates, all processes, e.g.
74 # the second child, in the orphaned group will be sent a SIGHUP.
75 #
76 # "However, as part of the session management system, there are exactly
77 # two cases where SIGHUP is sent on the death of a process:
78 #
79 # 1) When the process that dies is the session leader of a session that
80 # is attached to a terminal device, SIGHUP is sent to all processes
81 # in the foreground process group of that terminal device.
82 # 2) When the death of a process causes a process group to become
83 # orphaned, and one or more processes in the orphaned group are
84 # stopped, then SIGHUP and SIGCONT are sent to all members of the
85 # orphaned group." [2]
86 #
87 # The first case can be ignored since the child is guaranteed not to have
88 # a controlling terminal. The second case isn't so easy to dismiss.
89 # The process group is orphaned when the first child terminates and
90 # POSIX.1 requires that every STOPPED process in an orphaned process
91 # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
92 # second child is not STOPPED though, we can safely forego ignoring the
93 # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
94 #
95 # import signal # Set handlers for asynchronous events.
96 # signal.signal(signal.SIGHUP, signal.SIG_IGN)
97
98 try:
99 # Fork a second child and exit immediately to prevent zombies. This
100 # causes the second child process to be orphaned, making the init
101 # process responsible for its cleanup. And, since the first child is
102 # a session leader without a controlling terminal, it's possible for
103 # it to acquire one by opening a terminal in the future (System V-
104 # based systems). This second fork guarantees that the child is no
105 # longer a session leader, preventing the daemon from ever acquiring
106 # a controlling terminal.
107 pid = os.fork() # Fork a second child.
108 except OSError as e:
109 raise Exception("%s [%d]" % (e.strerror, e.errno))
110
111 if (pid == 0): # The second child.
112 # We probably don't want the file mode creation mask inherited from
113 # the parent, so we give the child complete control over permissions.
114 if UMASK is not None:
115 os.umask(UMASK)
116 else:
117 # Parent (the first child) of the second child.
118 os._exit(0)
119 else:
120 # exit() or _exit()?
121 # _exit is like exit(), but it doesn't call any functions registered
122 # with atexit (and on_exit) or any registered signal handlers. It also
123 # closes any open file descriptors. Using exit() may cause all stdio
124 # streams to be flushed twice and any temporary files may be unexpectedly
125 # removed. It's therefore recommended that child branches of a fork()
126 # and the parent branch(es) of a daemon use _exit().
127 return
128
129 # Close all open file descriptors. This prevents the child from keeping
130 # open any file descriptors inherited from the parent. There is a variety
131 # of methods to accomplish this task. Three are listed below.
132 #
133 # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
134 # number of open file descriptors to close. If it doesn't exist, use
135 # the default value (configurable).
136 #
137 # try:
138 # maxfd = os.sysconf("SC_OPEN_MAX")
139 # except (AttributeError, ValueError):
140 # maxfd = MAXFD
141 #
142 # OR
143 #
144 # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
145 # maxfd = os.sysconf("SC_OPEN_MAX")
146 # else:
147 # maxfd = MAXFD
148 #
149 # OR
150 #
151 # Use the getrlimit method to retrieve the maximum file descriptor number
152 # that can be opened by this process. If there is no limit on the
153 # resource, use the default value.
154 #
155 import resource # Resource usage information.
156 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
157 if (maxfd == resource.RLIM_INFINITY):
158 maxfd = MAXFD
159
160 # Iterate through and close all file descriptors.
161# for fd in range(0, maxfd):
162# try:
163# os.close(fd)
164# except OSError: # ERROR, fd wasn't open to begin with (ignored)
165# pass
166
167 # Redirect the standard I/O file descriptors to the specified file. Since
168 # the daemon has no controlling terminal, most daemons redirect stdin,
169 # stdout, and stderr to /dev/null. This is done to prevent side-effects
170 # from reads and writes to the standard I/O file descriptors.
171
172 # This call to open is guaranteed to return the lowest file descriptor,
173 # which will be 0 (stdin), since it was closed above.
174# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
175
176 # Duplicate standard input to standard output and standard error.
177# os.dup2(0, 1) # standard output (1)
178# os.dup2(0, 2) # standard error (2)
179
180
181 si = file('/dev/null', 'r')
182 so = file(logfile, 'w')
183 se = so
184
185
186 # Replace those fds with our own
187 os.dup2(si.fileno(), sys.stdin.fileno())
188 os.dup2(so.fileno(), sys.stdout.fileno())
189 os.dup2(se.fileno(), sys.stderr.fileno())
190
191 function()
192
193 os._exit(0)
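# Illustrative usage sketch, not part of the patch above: createDaemon()
# returns in the calling process, while the double-forked child (detached from
# the controlling terminal, with stdout/stderr redirected to the logfile) runs
# the supplied function and then _exit()s. The worker function and log path
# are made up; like the module itself, this assumes Python 2 (file() is used above).
import bb.daemonize

def worker():
    # Runs in the detached child; anything printed lands in /tmp/worker.log.
    print("worker running in the background")

bb.daemonize.createDaemon(worker, "/tmp/worker.log")
# The original process continues here while the daemonized worker runs.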
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
new file mode 100644
index 0000000000..82eefef1a6
--- /dev/null
+++ b/bitbake/lib/bb/data.py
@@ -0,0 +1,446 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Data' implementations
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9The expandKeys and update_data are the most expensive
10operations. At night the cookie monster came by and
11suggested 'give me cookies on setting the variables and
12things will work out'. Taking this suggestion into account
13applying the skills from the not yet passed 'Entwurf und
14Analyse von Algorithmen' lecture and the cookie
15monster seems to be right. We will track setVar more carefully
16to have faster update_data and expandKeys operations.
17
18This is a trade-off between speed and memory again but
19the speed is more critical here.
20"""
21
22# Copyright (C) 2003, 2004 Chris Larson
23# Copyright (C) 2005 Holger Hans Peter Freyther
24#
25# This program is free software; you can redistribute it and/or modify
26# it under the terms of the GNU General Public License version 2 as
27# published by the Free Software Foundation.
28#
29# This program is distributed in the hope that it will be useful,
30# but WITHOUT ANY WARRANTY; without even the implied warranty of
31# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
32# GNU General Public License for more details.
33#
34# You should have received a copy of the GNU General Public License along
35# with this program; if not, write to the Free Software Foundation, Inc.,
36# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
37#
38# Based on functions from the base bb module, Copyright 2003 Holger Schurig
39
40import sys, os, re
41if sys.argv[0][-5:] == "pydoc":
42 path = os.path.dirname(os.path.dirname(sys.argv[1]))
43else:
44 path = os.path.dirname(os.path.dirname(sys.argv[0]))
45sys.path.insert(0, path)
46from itertools import groupby
47
48from bb import data_smart
49from bb import codeparser
50import bb
51
52logger = data_smart.logger
53_dict_type = data_smart.DataSmart
54
55def init():
56 """Return a new object representing the Bitbake data"""
57 return _dict_type()
58
59def init_db(parent = None):
60 """Return a new object representing the Bitbake data,
61 optionally based on an existing object"""
62 if parent is not None:
63 return parent.createCopy()
64 else:
65 return _dict_type()
66
67def createCopy(source):
68 """Link the source set to the destination
69 If one does not find the value in the destination set,
70 search will go on to the source set to get the value.
71 Value from source are copy-on-write. i.e. any try to
72 modify one of them will end up putting the modified value
73 in the destination set.
74 """
75 return source.createCopy()
76
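# Illustrative sketch, not part of the patch above: the datastore returned by
# createCopy() chains back to its source, so reads fall through to the parent
# until the copy writes its own value (copy-on-write). Names are made up.
from bb import data

parent = data.init()
parent.setVar("A", "original")

child = data.createCopy(parent)
assert child.getVar("A", False) == "original"   # read falls through to the parent

child.setVar("A", "modified")                   # the write stays in the child
assert parent.getVar("A", False) == "original"
assert child.getVar("A", False) == "modified"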
77def initVar(var, d):
78 """Non-destructive var init for data structure"""
79 d.initVar(var)
80
81
82def setVar(var, value, d):
83 """Set a variable to a given value"""
84 d.setVar(var, value)
85
86
87def getVar(var, d, exp = 0):
88 """Gets the value of a variable"""
89 return d.getVar(var, exp)
90
91
92def renameVar(key, newkey, d):
93 """Renames a variable from key to newkey"""
94 d.renameVar(key, newkey)
95
96def delVar(var, d):
97 """Removes a variable from the data set"""
98 d.delVar(var)
99
100def appendVar(var, value, d):
101 """Append additional value to a variable"""
102 d.appendVar(var, value)
103
104def setVarFlag(var, flag, flagvalue, d):
105 """Set a flag for a given variable to a given value"""
106 d.setVarFlag(var, flag, flagvalue)
107
108def getVarFlag(var, flag, d):
109 """Gets given flag from given var"""
110 return d.getVarFlag(var, flag)
111
112def delVarFlag(var, flag, d):
113 """Removes a given flag from the variable's flags"""
114 d.delVarFlag(var, flag)
115
116def setVarFlags(var, flags, d):
117 """Set the flags for a given variable
118
119 Note:
120 setVarFlags will not clear previous
121 flags. Think of this method as
122 addVarFlags
123 """
124 d.setVarFlags(var, flags)
125
126def getVarFlags(var, d):
127 """Gets a variable's flags"""
128 return d.getVarFlags(var)
129
130def delVarFlags(var, d):
131 """Removes a variable's flags"""
132 d.delVarFlags(var)
133
134def keys(d):
135 """Return a list of keys in d"""
136 return d.keys()
137
138
139__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
140__expand_python_regexp__ = re.compile(r"\${@.+?}")
141
142def expand(s, d, varname = None):
143 """Variable expansion using the data store"""
144 return d.expand(s, varname)
145
146def expandKeys(alterdata, readdata = None):
147 if readdata == None:
148 readdata = alterdata
149
150 todolist = {}
151 for key in alterdata:
152 if not '${' in key:
153 continue
154
155 ekey = expand(key, readdata)
156 if key == ekey:
157 continue
158 todolist[key] = ekey
159
160 # These two for loops are split for performance to maximise the
161 # usefulness of the expand cache
162
163 for key in todolist:
164 ekey = todolist[key]
165 newval = alterdata.getVar(ekey, 0)
166 if newval:
167 val = alterdata.getVar(key, 0)
168 if val is not None and newval is not None:
169 bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
170 alterdata.renameVar(key, ekey)
171
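# Illustrative sketch, not part of the patch above: expandKeys() renames any
# variable whose *name* contains a ${...} reference to the expanded name.
# Variable names are made up.
from bb import data

d = data.init()
d.setVar("PN", "busybox")
d.setVar("SRC_URI_${PN}", "file://defconfig")

data.expandKeys(d)   # SRC_URI_${PN} becomes SRC_URI_busybox
assert d.getVar("SRC_URI_busybox", False) == "file://defconfig"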
172def inheritFromOS(d, savedenv, permitted):
173 """Inherit variables from the initial environment."""
174 exportlist = bb.utils.preserved_envvars_exported()
175 for s in savedenv.keys():
176 if s in permitted:
177 try:
178 d.setVar(s, getVar(s, savedenv, True), op = 'from env')
179 if s in exportlist:
180 d.setVarFlag(s, "export", True, op = 'auto env export')
181 except TypeError:
182 pass
183
184def emit_var(var, o=sys.__stdout__, d = init(), all=False):
185 """Emit a variable to be sourced by a shell."""
186 if getVarFlag(var, "python", d):
187 return 0
188
189 export = getVarFlag(var, "export", d)
190 unexport = getVarFlag(var, "unexport", d)
191 func = getVarFlag(var, "func", d)
192 if not all and not export and not unexport and not func:
193 return 0
194
195 try:
196 if all:
197 oval = getVar(var, d, 0)
198 val = getVar(var, d, 1)
199 except (KeyboardInterrupt, bb.build.FuncFailed):
200 raise
201 except Exception as exc:
202 o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
203 return 0
204
205 if all:
206 d.varhistory.emit(var, oval, val, o)
207
208 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
209 return 0
210
211 varExpanded = expand(var, d)
212
213 if unexport:
214 o.write('unset %s\n' % varExpanded)
215 return 0
216
217 if val is None:
218 return 0
219
220 val = str(val)
221
222 if varExpanded.startswith("BASH_FUNC_"):
223 varExpanded = varExpanded[10:-2]
224 val = val[3:] # Strip off "() "
225 o.write("%s() %s\n" % (varExpanded, val))
226 o.write("export -f %s\n" % (varExpanded))
227 return 1
228
229 if func:
230 # NOTE: should probably check for unbalanced {} within the var
231 o.write("%s() {\n%s\n}\n" % (varExpanded, val))
232 return 1
233
234 if export:
235 o.write('export ')
236
237 # if we're going to output this within doublequotes,
238 # to a shell, we need to escape the quotes in the var
239 alter = re.sub('"', '\\"', val)
240 alter = re.sub('\n', ' \\\n', alter)
241 alter = re.sub('\\$', '\\\\$', alter)
242 o.write('%s="%s"\n' % (varExpanded, alter))
243 return 0
244
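# Illustrative sketch, not part of the patch above: for exported variables,
# emit_var() escapes double quotes, newlines and '$' so that the emitted
# assignment survives being sourced by a shell. The variable name and value
# are made up.
import sys
from bb import data

d = data.init()
d.setVar("GREETING", 'say "hello" to $USER')
d.setVarFlag("GREETING", "export", True)

data.emit_var("GREETING", sys.stdout, d)
# Writes: export GREETING="say \"hello\" to \$USER"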
245def emit_env(o=sys.__stdout__, d = init(), all=False):
246 """Emits all items in the data store in a format such that it can be sourced by a shell."""
247
248 isfunc = lambda key: bool(d.getVarFlag(key, "func"))
249 keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
250 grouped = groupby(keys, isfunc)
251 for isfunc, keys in grouped:
252 for key in keys:
253 emit_var(key, o, d, all and not isfunc) and o.write('\n')
254
255def exported_keys(d):
256 return (key for key in d.keys() if not key.startswith('__') and
257 d.getVarFlag(key, 'export') and
258 not d.getVarFlag(key, 'unexport'))
259
260def exported_vars(d):
261 for key in exported_keys(d):
262 try:
263 value = d.getVar(key, True)
264 except Exception:
265            value = None  # don't fall through with a stale or unbound value
266
267 if value is not None:
268 yield key, str(value)
269
270def emit_func(func, o=sys.__stdout__, d = init()):
271 """Emits all items in the data store in a format such that it can be sourced by a shell."""
272
273 keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
274 for key in keys:
275 emit_var(key, o, d, False) and o.write('\n')
276
277 emit_var(func, o, d, False) and o.write('\n')
278 newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
279 newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
280 seen = set()
281 while newdeps:
282 deps = newdeps
283 seen |= deps
284 newdeps = set()
285 for dep in deps:
286 if d.getVarFlag(dep, "func") and not d.getVarFlag(dep, "python"):
287 emit_var(dep, o, d, False) and o.write('\n')
288 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
289 newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
290 newdeps -= seen
291
292_functionfmt = """
293def {function}(d):
294{body}"""
295
296def emit_func_python(func, o=sys.__stdout__, d = init()):
297 """Emits all items in the data store in a format such that it can be sourced by a shell."""
298
299 def write_func(func, o, call = False):
300 body = d.getVar(func, True)
301 if not body.startswith("def"):
302 body = _functionfmt.format(function=func, body=body)
303
304 o.write(body.strip() + "\n\n")
305 if call:
306 o.write(func + "(d)" + "\n\n")
307
308 write_func(func, o, True)
309 pp = bb.codeparser.PythonParser(func, logger)
310 pp.parse_python(d.getVar(func, True))
311 newdeps = pp.execs
312 newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
313 seen = set()
314 while newdeps:
315 deps = newdeps
316 seen |= deps
317 newdeps = set()
318 for dep in deps:
319 if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
320 write_func(dep, o)
321 pp = bb.codeparser.PythonParser(dep, logger)
322 pp.parse_python(d.getVar(dep, True))
323 newdeps |= pp.execs
324 newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
325 newdeps -= seen
326
327def update_data(d):
328 """Performs final steps upon the datastore, including application of overrides"""
329 d.finalize(parent = True)
330
331def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
332 deps = set()
333 try:
334 if key[-1] == ']':
335 vf = key[:-1].split('[')
336 value = d.getVarFlag(vf[0], vf[1], False)
337 parser = d.expandWithRefs(value, key)
338 deps |= parser.references
339 deps = deps | (keys & parser.execs)
340 return deps, value
341 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs"]) or {}
342 vardeps = varflags.get("vardeps")
343 value = d.getVar(key, False)
344
345 def handle_contains(value, contains, d):
346 newvalue = ""
347 for k in sorted(contains):
348 l = (d.getVar(k, True) or "").split()
349 for word in sorted(contains[k]):
350 if word in l:
351 newvalue += "\n%s{%s} = Set" % (k, word)
352 else:
353 newvalue += "\n%s{%s} = Unset" % (k, word)
354 if not newvalue:
355 return value
356 if not value:
357 return newvalue
358 return value + newvalue
359
360 if "vardepvalue" in varflags:
361 value = varflags.get("vardepvalue")
362 elif varflags.get("func"):
363 if varflags.get("python"):
364 parsedvar = d.expandWithRefs(value, key)
365 parser = bb.codeparser.PythonParser(key, logger)
366 if parsedvar.value and "\t" in parsedvar.value:
367 logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
368 parser.parse_python(parsedvar.value)
369 deps = deps | parser.references
370 value = handle_contains(value, parser.contains, d)
371 else:
372 parsedvar = d.expandWithRefs(value, key)
373 parser = bb.codeparser.ShellParser(key, logger)
374 parser.parse_shell(parsedvar.value)
375 deps = deps | shelldeps
376 if vardeps is None:
377 parser.log.flush()
378 if "prefuncs" in varflags:
379 deps = deps | set(varflags["prefuncs"].split())
380 if "postfuncs" in varflags:
381 deps = deps | set(varflags["postfuncs"].split())
382 deps = deps | parsedvar.references
383 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
384 value = handle_contains(value, parsedvar.contains, d)
385 else:
386 parser = d.expandWithRefs(value, key)
387 deps |= parser.references
388 deps = deps | (keys & parser.execs)
389 value = handle_contains(value, parser.contains, d)
390
391 if "vardepvalueexclude" in varflags:
392 exclude = varflags.get("vardepvalueexclude")
393 for excl in exclude.split('|'):
394 if excl:
395 value = value.replace(excl, '')
396
397 # Add varflags, assuming an exclusion list is set
398 if varflagsexcl:
399 varfdeps = []
400 for f in varflags:
401 if f not in varflagsexcl:
402 varfdeps.append('%s[%s]' % (key, f))
403 if varfdeps:
404 deps |= set(varfdeps)
405
406 deps |= set((vardeps or "").split())
407 deps -= set(varflags.get("vardepsexclude", "").split())
408 except Exception as e:
409 raise bb.data_smart.ExpansionError(key, None, e)
410 return deps, value
411 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
412 #d.setVarFlag(key, "vardeps", deps)
413
414def generate_dependencies(d):
415
416 keys = set(key for key in d if not key.startswith("__"))
417 shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
418 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
419
420 deps = {}
421 values = {}
422
423 tasklist = d.getVar('__BBTASKS') or []
424 for task in tasklist:
425 deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
426 newdeps = deps[task]
427 seen = set()
428 while newdeps:
429 nextdeps = newdeps
430 seen |= nextdeps
431 newdeps = set()
432 for dep in nextdeps:
433 if dep not in deps:
434 deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
435 newdeps |= deps[dep]
436 newdeps -= seen
437 #print "For %s: %s" % (task, str(deps[task]))
438 return tasklist, deps, values
439
440def inherits_class(klass, d):
441 val = getVar('__inherit_cache', d) or []
442 needle = os.path.join('classes', '%s.bbclass' % klass)
443 for v in val:
444 if v.endswith(needle):
445 return True
446 return False
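# Illustrative sketch, not part of the patch above: build_dependencies()
# reports which variables a value refers to; generate_dependencies() then
# chases those references transitively for each task in __BBTASKS. Variable
# names are made up.
from bb import data

d = data.init()
d.setVar("PN", "foo")
d.setVar("PV", "1.0")
d.setVar("SRC_URI", "http://example.com/${PN}-${PV}.tar.gz")

keys = set(key for key in d if not key.startswith("__"))
deps, value = data.build_dependencies("SRC_URI", keys, set(), None, d)
print(sorted(deps))   # ['PN', 'PV']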
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
new file mode 100644
index 0000000000..d4bb98dd74
--- /dev/null
+++ b/bitbake/lib/bb/data_smart.py
@@ -0,0 +1,811 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Smart Dictionary Implementation
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2004, 2005 Seb Frankengul
13# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
14# Copyright (C) 2005 Uli Luckas
15# Copyright (C) 2005 ROAD GmbH
16#
17# This program is free software; you can redistribute it and/or modify
18# it under the terms of the GNU General Public License version 2 as
19# published by the Free Software Foundation.
20#
21# This program is distributed in the hope that it will be useful,
22# but WITHOUT ANY WARRANTY; without even the implied warranty of
23# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24# GNU General Public License for more details.
25#
26# You should have received a copy of the GNU General Public License along
27# with this program; if not, write to the Free Software Foundation, Inc.,
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29# Based on functions from the base bb module, Copyright 2003 Holger Schurig
30
31import copy, re, sys, traceback
32from collections import MutableMapping
33import logging
34import hashlib
35import bb, bb.codeparser
36from bb import utils
37from bb.COW import COWDictBase
38
39logger = logging.getLogger("BitBake.Data")
40
41__setvar_keyword__ = ["_append", "_prepend", "_remove"]
42__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
43__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}")
44__expand_python_regexp__ = re.compile(r"\${@.+?}")
45
46def infer_caller_details(loginfo, parent = False, varval = True):
47 """Save the caller the trouble of specifying everything."""
48 # Save effort.
49 if 'ignore' in loginfo and loginfo['ignore']:
50 return
51 # If nothing was provided, mark this as possibly unneeded.
52 if not loginfo:
53 loginfo['ignore'] = True
54 return
55 # Infer caller's likely values for variable (var) and value (value),
56 # to reduce clutter in the rest of the code.
57 if varval and ('variable' not in loginfo or 'detail' not in loginfo):
58 try:
59 raise Exception
60 except Exception:
61 tb = sys.exc_info()[2]
62 if parent:
63 above = tb.tb_frame.f_back.f_back
64 else:
65 above = tb.tb_frame.f_back
66 lcls = above.f_locals.items()
67 for k, v in lcls:
68 if k == 'value' and 'detail' not in loginfo:
69 loginfo['detail'] = v
70 if k == 'var' and 'variable' not in loginfo:
71 loginfo['variable'] = v
72 # Infer file/line/function from traceback
73 if 'file' not in loginfo:
74 depth = 3
75 if parent:
76 depth = 4
77 file, line, func, text = traceback.extract_stack(limit = depth)[0]
78 loginfo['file'] = file
79 loginfo['line'] = line
80        if 'func' not in loginfo:
81 loginfo['func'] = func
82
83class VariableParse:
84 def __init__(self, varname, d, val = None):
85 self.varname = varname
86 self.d = d
87 self.value = val
88
89 self.references = set()
90 self.execs = set()
91 self.contains = {}
92
93 def var_sub(self, match):
94 key = match.group()[2:-1]
95 if self.varname and key:
96 if self.varname == key:
97 raise Exception("variable %s references itself!" % self.varname)
98 if key in self.d.expand_cache:
99 varparse = self.d.expand_cache[key]
100 var = varparse.value
101 else:
102 var = self.d.getVarFlag(key, "_content", True)
103 self.references.add(key)
104 if var is not None:
105 return var
106 else:
107 return match.group()
108
109 def python_sub(self, match):
110 code = match.group()[3:-1]
111 codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
112
113 parser = bb.codeparser.PythonParser(self.varname, logger)
114 parser.parse_python(code)
115 if self.varname:
116 vardeps = self.d.getVarFlag(self.varname, "vardeps", True)
117 if vardeps is None:
118 parser.log.flush()
119 else:
120 parser.log.flush()
121 self.references |= parser.references
122 self.execs |= parser.execs
123
124 for k in parser.contains:
125 if k not in self.contains:
126 self.contains[k] = parser.contains[k].copy()
127 else:
128 self.contains[k].update(parser.contains[k])
129 value = utils.better_eval(codeobj, DataContext(self.d))
130 return str(value)
131
132
133class DataContext(dict):
134 def __init__(self, metadata, **kwargs):
135 self.metadata = metadata
136 dict.__init__(self, **kwargs)
137 self['d'] = metadata
138
139 def __missing__(self, key):
140 value = self.metadata.getVar(key, True)
141 if value is None or self.metadata.getVarFlag(key, 'func'):
142 raise KeyError(key)
143 else:
144 return value
145
146class ExpansionError(Exception):
147 def __init__(self, varname, expression, exception):
148 self.expression = expression
149 self.variablename = varname
150 self.exception = exception
151 if varname:
152 if expression:
153 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
154 else:
155 self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception)
156 else:
157 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
158 Exception.__init__(self, self.msg)
159 self.args = (varname, expression, exception)
160 def __str__(self):
161 return self.msg
162
163class IncludeHistory(object):
164 def __init__(self, parent = None, filename = '[TOP LEVEL]'):
165 self.parent = parent
166 self.filename = filename
167 self.children = []
168 self.current = self
169
170 def copy(self):
171 new = IncludeHistory(self.parent, self.filename)
172 for c in self.children:
173 new.children.append(c)
174 return new
175
176 def include(self, filename):
177 newfile = IncludeHistory(self.current, filename)
178 self.current.children.append(newfile)
179 self.current = newfile
180 return self
181
182 def __enter__(self):
183 pass
184
185 def __exit__(self, a, b, c):
186 if self.current.parent:
187 self.current = self.current.parent
188 else:
189            bb.warn("Include log: Tried to finish '%s' at top level." % self.filename)
190 return False
191
192 def emit(self, o, level = 0):
193 """Emit an include history file, and its children."""
194 if level:
195 spaces = " " * (level - 1)
196 o.write("# %s%s" % (spaces, self.filename))
197 if len(self.children) > 0:
198 o.write(" includes:")
199 else:
200 o.write("#\n# INCLUDE HISTORY:\n#")
201 level = level + 1
202 for child in self.children:
203 o.write("\n")
204 child.emit(o, level)
205
206class VariableHistory(object):
207 def __init__(self, dataroot):
208 self.dataroot = dataroot
209 self.variables = COWDictBase.copy()
210
211 def copy(self):
212 new = VariableHistory(self.dataroot)
213 new.variables = self.variables.copy()
214 return new
215
216 def record(self, *kwonly, **loginfo):
217 if not self.dataroot._tracking:
218 return
219 if len(kwonly) > 0:
220 raise TypeError
221 infer_caller_details(loginfo, parent = True)
222 if 'ignore' in loginfo and loginfo['ignore']:
223 return
224 if 'op' not in loginfo or not loginfo['op']:
225 loginfo['op'] = 'set'
226 if 'detail' in loginfo:
227 loginfo['detail'] = str(loginfo['detail'])
228 if 'variable' not in loginfo or 'file' not in loginfo:
229 raise ValueError("record() missing variable or file.")
230 var = loginfo['variable']
231
232 if var not in self.variables:
233 self.variables[var] = []
234 self.variables[var].append(loginfo.copy())
235
236 def variable(self, var):
237 if var in self.variables:
238 return self.variables[var]
239 else:
240 return []
241
242 def emit(self, var, oval, val, o):
243 history = self.variable(var)
244 commentVal = re.sub('\n', '\n#', str(oval))
245 if history:
246 if len(history) == 1:
247 o.write("#\n# $%s\n" % var)
248 else:
249 o.write("#\n# $%s [%d operations]\n" % (var, len(history)))
250 for event in history:
251 # o.write("# %s\n" % str(event))
252 if 'func' in event:
253 # If we have a function listed, this is internal
254 # code, not an operation in a config file, and the
255 # full path is distracting.
256 event['file'] = re.sub('.*/', '', event['file'])
257 display_func = ' [%s]' % event['func']
258 else:
259 display_func = ''
260 if 'flag' in event:
261 flag = '[%s] ' % (event['flag'])
262 else:
263 flag = ''
264 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
265 if len(history) > 1:
266 o.write("# pre-expansion value:\n")
267 o.write('# "%s"\n' % (commentVal))
268 else:
269 o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
270 o.write('# "%s"\n' % (commentVal))
271
272 def get_variable_files(self, var):
273 """Get the files where operations are made on a variable"""
274 var_history = self.variable(var)
275 files = []
276 for event in var_history:
277 files.append(event['file'])
278 return files
279
280 def get_variable_lines(self, var, f):
281 """Get the line where a operation is made on a variable in file f"""
282 var_history = self.variable(var)
283 lines = []
284 for event in var_history:
285            if f == event['file']:
286 line = event['line']
287 lines.append(line)
288 return lines
289
290 def del_var_history(self, var, f=None, line=None):
291 """If file f and line are not given, the entire history of var is deleted"""
292 if var in self.variables:
293 if f and line:
294 self.variables[var] = [ x for x in self.variables[var] if x['file']!=f and x['line']!=line]
295 else:
296 self.variables[var] = []
297
298class DataSmart(MutableMapping):
299 def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
300 self.dict = {}
301
302 self.inchistory = IncludeHistory()
303 self.varhistory = VariableHistory(self)
304 self._tracking = False
305
306 # cookie monster tribute
307 self._special_values = special
308 self._seen_overrides = seen
309
310 self.expand_cache = {}
311
312 def enableTracking(self):
313 self._tracking = True
314
315 def disableTracking(self):
316 self._tracking = False
317
318 def expandWithRefs(self, s, varname):
319
320 if not isinstance(s, basestring): # sanity check
321 return VariableParse(varname, self, s)
322
323 if varname and varname in self.expand_cache:
324 return self.expand_cache[varname]
325
326 varparse = VariableParse(varname, self)
327
328 while s.find('${') != -1:
329 olds = s
330 try:
331 s = __expand_var_regexp__.sub(varparse.var_sub, s)
332 s = __expand_python_regexp__.sub(varparse.python_sub, s)
333 if s == olds:
334 break
335 except ExpansionError:
336 raise
337 except bb.parse.SkipRecipe:
338 raise
339 except Exception as exc:
340 raise ExpansionError(varname, s, exc)
341
342 varparse.value = s
343
344 if varname:
345 self.expand_cache[varname] = varparse
346
347 return varparse
348
349 def expand(self, s, varname = None):
350 return self.expandWithRefs(s, varname).value
351
352
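# Illustrative sketch, not part of the patch above: expand() substitutes
# ${VAR} references and evaluates inline-Python ${@...} expressions with the
# datastore available as 'd'; unknown references are left untouched. This
# assumes bb.codeparser can run here with an empty parser cache. Variable
# names and values are made up.
from bb.data_smart import DataSmart

d = DataSmart()
d.setVar("PV", "1.2.3")
d.setVar("MAJOR", "${@d.getVar('PV', True).split('.')[0]}")

print(d.expand("v${MAJOR} of ${PN}"))   # "v1 of ${PN}" -- ${PN} is undefined, so left as-is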
353 def finalize(self, parent = False):
354 """Performs final steps upon the datastore, including application of overrides"""
355
356 overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
357 finalize_caller = {
358 'op': 'finalize',
359 }
360 infer_caller_details(finalize_caller, parent = parent, varval = False)
361
362 #
363 # Well let us see what breaks here. We used to iterate
364 # over each variable and apply the override and then
365 # do the line expanding.
366        # If we have bad luck - which we will have - the keys
367        # were in some order that was important for this
368        # method, an ordering we no longer have.
369 # Anyway we will fix that and write test cases this
370 # time.
371
372 #
373 # First we apply all overrides
374 # Then we will handle _append and _prepend and store the _remove
375 # information for later.
376 #
377
378 # We only want to report finalization once per variable overridden.
379 finalizes_reported = {}
380
381 for o in overrides:
382            # calculate the length of '_' + override
383 l = len(o) + 1
384
385 # see if one should even try
386 if o not in self._seen_overrides:
387 continue
388
389 vars = self._seen_overrides[o].copy()
390 for var in vars:
391 name = var[:-l]
392 try:
393 # Report only once, even if multiple changes.
394 if name not in finalizes_reported:
395 finalizes_reported[name] = True
396 finalize_caller['variable'] = name
397 finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
398 self.varhistory.record(**finalize_caller)
399 # Copy history of the override over.
400 for event in self.varhistory.variable(var):
401 loginfo = event.copy()
402 loginfo['variable'] = name
403 loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
404 self.varhistory.record(**loginfo)
405 self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
406 self.delVar(var)
407 except Exception:
408 logger.info("Untracked delVar")
409
410 # now on to the appends and prepends, and stashing the removes
411 for op in __setvar_keyword__:
412 if op in self._special_values:
413 appends = self._special_values[op] or []
414 for append in appends:
415 keep = []
416 for (a, o) in self.getVarFlag(append, op) or []:
417 match = True
418 if o:
419 for o2 in o.split("_"):
420 if not o2 in overrides:
421 match = False
422 if not match:
423                            keep.append((a, o))
424 continue
425
426 if op == "_append":
427 sval = self.getVar(append, False) or ""
428 sval += a
429 self.setVar(append, sval)
430 elif op == "_prepend":
431 sval = a + (self.getVar(append, False) or "")
432 self.setVar(append, sval)
433 elif op == "_remove":
434 removes = self.getVarFlag(append, "_removeactive", False) or []
435 removes.extend(a.split())
436 self.setVarFlag(append, "_removeactive", removes, ignore=True)
437
438 # We save overrides that may be applied at some later stage
439 if keep:
440 self.setVarFlag(append, op, keep, ignore=True)
441 else:
442 self.delVarFlag(append, op, ignore=True)
443
444 def initVar(self, var):
445 self.expand_cache = {}
446 if not var in self.dict:
447 self.dict[var] = {}
448
449 def _findVar(self, var):
450 dest = self.dict
451 while dest:
452 if var in dest:
453 return dest[var]
454
455 if "_data" not in dest:
456 break
457 dest = dest["_data"]
458
459 def _makeShadowCopy(self, var):
460 if var in self.dict:
461 return
462
463 local_var = self._findVar(var)
464
465 if local_var:
466 self.dict[var] = copy.copy(local_var)
467 else:
468 self.initVar(var)
469
470
471 def setVar(self, var, value, **loginfo):
472 #print("var=" + str(var) + " val=" + str(value))
473 if 'op' not in loginfo:
474 loginfo['op'] = "set"
475 self.expand_cache = {}
476 match = __setvar_regexp__.match(var)
477 if match and match.group("keyword") in __setvar_keyword__:
478 base = match.group('base')
479 keyword = match.group("keyword")
480 override = match.group('add')
481 l = self.getVarFlag(base, keyword) or []
482 l.append([value, override])
483 self.setVarFlag(base, keyword, l, ignore=True)
484 # And cause that to be recorded:
485 loginfo['detail'] = value
486 loginfo['variable'] = base
487 if override:
488 loginfo['op'] = '%s[%s]' % (keyword, override)
489 else:
490 loginfo['op'] = keyword
491 self.varhistory.record(**loginfo)
492 # todo make sure keyword is not __doc__ or __module__
493 # pay the cookie monster
494 try:
495 self._special_values[keyword].add(base)
496 except KeyError:
497 self._special_values[keyword] = set()
498 self._special_values[keyword].add(base)
499
500 return
501
502 if not var in self.dict:
503 self._makeShadowCopy(var)
504
505 # more cookies for the cookie monster
506 if '_' in var:
507 self._setvar_update_overrides(var)
508
509 # setting var
510 self.dict[var]["_content"] = value
511 self.varhistory.record(**loginfo)
512
513 def _setvar_update_overrides(self, var):
514 # aka pay the cookie monster
515 override = var[var.rfind('_')+1:]
516 shortvar = var[:var.rfind('_')]
517 while override:
518 if override not in self._seen_overrides:
519 self._seen_overrides[override] = set()
520 self._seen_overrides[override].add( var )
521 override = None
522 if "_" in shortvar:
523 override = var[shortvar.rfind('_')+1:]
524 shortvar = var[:shortvar.rfind('_')]
525
526 def getVar(self, var, expand=False, noweakdefault=False):
527 return self.getVarFlag(var, "_content", expand, noweakdefault)
528
529 def renameVar(self, key, newkey, **loginfo):
530 """
531 Rename the variable key to newkey
532 """
533 val = self.getVar(key, 0)
534 if val is not None:
535 loginfo['variable'] = newkey
536 loginfo['op'] = 'rename from %s' % key
537 loginfo['detail'] = val
538 self.varhistory.record(**loginfo)
539 self.setVar(newkey, val, ignore=True)
540
541 for i in (__setvar_keyword__):
542 src = self.getVarFlag(key, i)
543 if src is None:
544 continue
545
546 dest = self.getVarFlag(newkey, i) or []
547 dest.extend(src)
548 self.setVarFlag(newkey, i, dest, ignore=True)
549
550 if i in self._special_values and key in self._special_values[i]:
551 self._special_values[i].remove(key)
552 self._special_values[i].add(newkey)
553
554 loginfo['variable'] = key
555 loginfo['op'] = 'rename (to)'
556 loginfo['detail'] = newkey
557 self.varhistory.record(**loginfo)
558 self.delVar(key, ignore=True)
559
560 def appendVar(self, var, value, **loginfo):
561 loginfo['op'] = 'append'
562 self.varhistory.record(**loginfo)
563 newvalue = (self.getVar(var, False) or "") + value
564 self.setVar(var, newvalue, ignore=True)
565
566 def prependVar(self, var, value, **loginfo):
567 loginfo['op'] = 'prepend'
568 self.varhistory.record(**loginfo)
569 newvalue = value + (self.getVar(var, False) or "")
570 self.setVar(var, newvalue, ignore=True)
571
572 def delVar(self, var, **loginfo):
573 loginfo['detail'] = ""
574 loginfo['op'] = 'del'
575 self.varhistory.record(**loginfo)
576 self.expand_cache = {}
577 self.dict[var] = {}
578 if '_' in var:
579 override = var[var.rfind('_')+1:]
580 if override and override in self._seen_overrides and var in self._seen_overrides[override]:
581 self._seen_overrides[override].remove(var)
582
583 def setVarFlag(self, var, flag, value, **loginfo):
584 if 'op' not in loginfo:
585 loginfo['op'] = "set"
586 loginfo['flag'] = flag
587 self.varhistory.record(**loginfo)
588 if not var in self.dict:
589 self._makeShadowCopy(var)
590 self.dict[var][flag] = value
591
592 if flag == "defaultval" and '_' in var:
593 self._setvar_update_overrides(var)
594
595 if flag == "unexport" or flag == "export":
596 if not "__exportlist" in self.dict:
597 self._makeShadowCopy("__exportlist")
598 if not "_content" in self.dict["__exportlist"]:
599 self.dict["__exportlist"]["_content"] = set()
600 self.dict["__exportlist"]["_content"].add(var)
601
602 def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
603 local_var = self._findVar(var)
604 value = None
605 if local_var is not None:
606 if flag in local_var:
607 value = copy.copy(local_var[flag])
608 elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
609 value = copy.copy(local_var["defaultval"])
610 if expand and value:
611 # Only getvar (flag == _content) hits the expand cache
612 cachename = None
613 if flag == "_content":
614 cachename = var
615 else:
616 cachename = var + "[" + flag + "]"
617 value = self.expand(value, cachename)
618 if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
619 removes = [self.expand(r).split() for r in local_var["_removeactive"]]
620 removes = reduce(lambda a, b: a+b, removes, [])
621 filtered = filter(lambda v: v not in removes,
622 value.split())
623 value = " ".join(filtered)
624 if expand:
625 # We need to ensure the expand cache has the correct value
626 # flag == "_content" here
627 self.expand_cache[var].value = value
628 return value
629
630 def delVarFlag(self, var, flag, **loginfo):
631 local_var = self._findVar(var)
632 if not local_var:
633 return
634 if not var in self.dict:
635 self._makeShadowCopy(var)
636
637 if var in self.dict and flag in self.dict[var]:
638 loginfo['detail'] = ""
639 loginfo['op'] = 'delFlag'
640 loginfo['flag'] = flag
641 self.varhistory.record(**loginfo)
642
643 del self.dict[var][flag]
644
645 def appendVarFlag(self, var, flag, value, **loginfo):
646 loginfo['op'] = 'append'
647 loginfo['flag'] = flag
648 self.varhistory.record(**loginfo)
649 newvalue = (self.getVarFlag(var, flag, False) or "") + value
650 self.setVarFlag(var, flag, newvalue, ignore=True)
651
652 def prependVarFlag(self, var, flag, value, **loginfo):
653 loginfo['op'] = 'prepend'
654 loginfo['flag'] = flag
655 self.varhistory.record(**loginfo)
656 newvalue = value + (self.getVarFlag(var, flag, False) or "")
657 self.setVarFlag(var, flag, newvalue, ignore=True)
658
659 def setVarFlags(self, var, flags, **loginfo):
660 infer_caller_details(loginfo)
661 if not var in self.dict:
662 self._makeShadowCopy(var)
663
664 for i in flags:
665 if i == "_content":
666 continue
667 loginfo['flag'] = i
668 loginfo['detail'] = flags[i]
669 self.varhistory.record(**loginfo)
670 self.dict[var][i] = flags[i]
671
672 def getVarFlags(self, var, expand = False, internalflags=False):
673 local_var = self._findVar(var)
674 flags = {}
675
676 if local_var:
677 for i in local_var:
678 if i.startswith("_") and not internalflags:
679 continue
680 flags[i] = local_var[i]
681 if expand and i in expand:
682 flags[i] = self.expand(flags[i], var + "[" + i + "]")
683 if len(flags) == 0:
684 return None
685 return flags
686
687
688 def delVarFlags(self, var, **loginfo):
689 if not var in self.dict:
690 self._makeShadowCopy(var)
691
692 if var in self.dict:
693 content = None
694
695 loginfo['op'] = 'delete flags'
696 self.varhistory.record(**loginfo)
697
698 # try to save the content
699 if "_content" in self.dict[var]:
700 content = self.dict[var]["_content"]
701 self.dict[var] = {}
702 self.dict[var]["_content"] = content
703 else:
704 del self.dict[var]
705
706
707 def createCopy(self):
708 """
709 Create a copy of self by setting _data to self
710 """
711 # we really want this to be a DataSmart...
712 data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
713 data.dict["_data"] = self.dict
714 data.varhistory = self.varhistory.copy()
715 data.varhistory.datasmart = data
716 data.inchistory = self.inchistory.copy()
717
718 data._tracking = self._tracking
719
720 return data
721
722 def expandVarref(self, variable, parents=False):
723 """Find all references to variable in the data and expand it
724 in place, optionally descending to parent datastores."""
725
726 if parents:
727 keys = iter(self)
728 else:
729 keys = self.localkeys()
730
731 ref = '${%s}' % variable
732 value = self.getVar(variable, False)
733 for key in keys:
734 referrervalue = self.getVar(key, False)
735 if referrervalue and ref in referrervalue:
736 self.setVar(key, referrervalue.replace(ref, value))
737
738 def localkeys(self):
739 for key in self.dict:
740 if key != '_data':
741 yield key
742
743 def __iter__(self):
744 def keylist(d):
745 klist = set()
746 for key in d:
747 if key == "_data":
748 continue
749 if not d[key]:
750 continue
751 klist.add(key)
752
753 if "_data" in d:
754 klist |= keylist(d["_data"])
755
756 return klist
757
758 for k in keylist(self.dict):
759 yield k
760
761 def __len__(self):
762 return len(frozenset(self))
763
764 def __getitem__(self, item):
765 value = self.getVar(item, False)
766 if value is None:
767 raise KeyError(item)
768 else:
769 return value
770
771 def __setitem__(self, var, value):
772 self.setVar(var, value)
773
774 def __delitem__(self, var):
775 self.delVar(var)
776
777 def get_hash(self):
778 data = {}
779 d = self.createCopy()
780 bb.data.expandKeys(d)
781 bb.data.update_data(d)
782
783 config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
784 keys = set(key for key in iter(d) if not key.startswith("__"))
785 for key in keys:
786 if key in config_whitelist:
787 continue
788
789 value = d.getVar(key, False) or ""
790 data.update({key:value})
791
792 varflags = d.getVarFlags(key, internalflags = True)
793 if not varflags:
794 continue
795 for f in varflags:
796 if f == "_content":
797 continue
798 data.update({'%s[%s]' % (key, f):varflags[f]})
799
800 for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]:
801 bb_list = d.getVar(key, False) or []
802 bb_list.sort()
803 data.update({key:str(bb_list)})
804
805 if key == "__BBANONFUNCS":
806 for i in bb_list:
807 value = d.getVar(i, True) or ""
808 data.update({i:value})
809
810 data_str = str([(k, data[k]) for k in sorted(data.keys())])
811 return hashlib.md5(data_str).hexdigest()
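# Illustrative sketch, not part of the patch above: setVar() stashes
# OVERRIDE-suffixed values and _append/_prepend/_remove fragments, and
# finalize() applies them in OVERRIDES order. The override names and values
# are made up.
from bb.data_smart import DataSmart

d = DataSmart()
d.setVar("OVERRIDES", "local:qemuarm")
d.setVar("CFLAGS", "-O2")
d.setVar("CFLAGS_qemuarm", "-O1")   # takes effect when the qemuarm override is applied
d.setVar("CFLAGS_append", " -g")    # stored and applied by finalize()

d.finalize()
print(d.getVar("CFLAGS", True))     # "-O1 -g"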
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
new file mode 100644
index 0000000000..6cbd0d55db
--- /dev/null
+++ b/bitbake/lib/bb/event.py
@@ -0,0 +1,639 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Event' implementation
5
6Classes and functions for manipulating 'events' in the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import os, sys
26import warnings
27try:
28 import cPickle as pickle
29except ImportError:
30 import pickle
31import logging
32import atexit
33import traceback
34import bb.utils
35import bb.compat
36import bb.exceptions
37
38# This is the pid for which we should generate the event. This is set when
39# the runqueue forks off.
40worker_pid = 0
41worker_fire = None
42
43logger = logging.getLogger('BitBake.Event')
44
45class Event(object):
46 """Base class for events"""
47
48 def __init__(self):
49 self.pid = worker_pid
50
51Registered = 10
52AlreadyRegistered = 14
53
54def get_class_handlers():
55 return _handlers
56
57def set_class_handlers(h):
58 global _handlers
59 _handlers = h
60
61def clean_class_handlers():
62 return bb.compat.OrderedDict()
63
64# Internal
65_handlers = clean_class_handlers()
66_ui_handlers = {}
67_ui_logfilters = {}
68_ui_handler_seq = 0
69_event_handler_map = {}
70_catchall_handlers = {}
71
72def execute_handler(name, handler, event, d):
73 event.data = d
74 try:
75 ret = handler(event)
76 except (bb.parse.SkipRecipe, bb.BBHandledException):
77 raise
78 except Exception:
79 etype, value, tb = sys.exc_info()
80 logger.error("Execution of event handler '%s' failed" % name,
81 exc_info=(etype, value, tb.tb_next))
82 raise
83 except SystemExit as exc:
84 if exc.code != 0:
85 logger.error("Execution of event handler '%s' failed" % name)
86 raise
87 finally:
88 del event.data
89
90def fire_class_handlers(event, d):
91 if isinstance(event, logging.LogRecord):
92 return
93
94 eid = str(event.__class__)[8:-2]
95 evt_hmap = _event_handler_map.get(eid, {})
96 for name, handler in _handlers.iteritems():
97 if name in _catchall_handlers or name in evt_hmap:
98 execute_handler(name, handler, event, d)
99
100ui_queue = []
101@atexit.register
102def print_ui_queue():
103 """If we're exiting before a UI has been spawned, display any queued
104 LogRecords to the console."""
105 logger = logging.getLogger("BitBake")
106 if not _ui_handlers:
107 from bb.msg import BBLogFormatter
108 console = logging.StreamHandler(sys.stdout)
109 console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
110 logger.handlers = [console]
111
112 # First check to see if we have any proper messages
113 msgprint = False
114 for event in ui_queue:
115 if isinstance(event, logging.LogRecord):
116 if event.levelno > logging.DEBUG:
117 logger.handle(event)
118 msgprint = True
119 if msgprint:
120 return
121
122 # Nope, so just print all of the messages we have (including debug messages)
123 for event in ui_queue:
124 if isinstance(event, logging.LogRecord):
125 logger.handle(event)
126
127def fire_ui_handlers(event, d):
128 if not _ui_handlers:
129 # No UI handlers registered yet, queue up the messages
130 ui_queue.append(event)
131 return
132
133 errors = []
134 for h in _ui_handlers:
135 #print "Sending event %s" % event
136 try:
137 if not _ui_logfilters[h].filter(event):
138 continue
139 # We use pickle here since it better handles object instances
140 # which xmlrpc's marshaller does not. Events *must* be serializable
141 # by pickle.
142 if hasattr(_ui_handlers[h].event, "sendpickle"):
143 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
144 else:
145 _ui_handlers[h].event.send(event)
146 except:
147 errors.append(h)
148 for h in errors:
149 del _ui_handlers[h]
150
151def fire(event, d):
152 """Fire off an Event"""
153
154 # We can fire class handlers in the worker process context and this is
155 # desired so they get the task based datastore.
156 # UI handlers need to be fired in the server context so we defer this. They
157 # don't have a datastore so the datastore context isn't a problem.
158
159 fire_class_handlers(event, d)
160 if worker_fire:
161 worker_fire(event, d)
162 else:
163 fire_ui_handlers(event, d)
164
165def fire_from_worker(event, d):
166 fire_ui_handlers(event, d)
167
168noop = lambda _: None
169def register(name, handler, mask=[]):
170 """Register an Event handler"""
171
172 # already registered
173 if name in _handlers:
174 return AlreadyRegistered
175
176 if handler is not None:
177 # handle string containing python code
178 if isinstance(handler, basestring):
179 tmp = "def %s(e):\n%s" % (name, handler)
180 try:
181 code = compile(tmp, "%s(e)" % name, "exec")
182 except SyntaxError:
183 logger.error("Unable to register event handler '%s':\n%s", name,
184 ''.join(traceback.format_exc(limit=0)))
185 _handlers[name] = noop
186 return
187 env = {}
188 bb.utils.better_exec(code, env)
189 func = bb.utils.better_eval(name, env)
190 _handlers[name] = func
191 else:
192 _handlers[name] = handler
193
194 if not mask or '*' in mask:
195 _catchall_handlers[name] = True
196 else:
197 for m in mask:
198 if _event_handler_map.get(m, None) is None:
199 _event_handler_map[m] = {}
200 _event_handler_map[m][name] = True
201
202 return Registered
203
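# Illustrative sketch of the registration/dispatch flow implemented above.
# The handler name and mask below are hypothetical; recipes normally use the
# "addhandler" directive rather than calling register() directly.
import bb.event

def print_config_parsed(e):
    # Called for every event whose class name matches the registered mask
    print("saw event: %s" % bb.event.getName(e))

bb.event.register("print_config_parsed", print_config_parsed,
                  mask=["bb.event.ConfigParsed"])
bb.event.fire(bb.event.ConfigParsed(), None)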
204def remove(name, handler):
205 """Remove an Event handler"""
206 _handlers.pop(name)
207
208def register_UIHhandler(handler):
209 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
210 _ui_handlers[_ui_handler_seq] = handler
211 level, debug_domains = bb.msg.constructLogOptions()
212 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
213 return _ui_handler_seq
214
215def unregister_UIHhandler(handlerNum):
216 if handlerNum in _ui_handlers:
217 del _ui_handlers[handlerNum]
218 return
219
220# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC
221class UIEventFilter(object):
222 def __init__(self, level, debug_domains):
223 self.update(None, level, debug_domains)
224
225 def update(self, eventmask, level, debug_domains):
226 self.eventmask = eventmask
227 self.stdlevel = level
228 self.debug_domains = debug_domains
229
230 def filter(self, event):
231 if isinstance(event, logging.LogRecord):
232 if event.levelno >= self.stdlevel:
233 return True
234 if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]:
235 return True
236 return False
237 eid = str(event.__class__)[8:-2]
238 if self.eventmask and eid not in self.eventmask:
239 return False
240 return True
241
242def set_UIHmask(handlerNum, level, debug_domains, mask):
243 if not handlerNum in _ui_handlers:
244 return False
245 if '*' in mask:
246 _ui_logfilters[handlerNum].update(None, level, debug_domains)
247 else:
248 _ui_logfilters[handlerNum].update(mask, level, debug_domains)
249 return True
250
251def getName(e):
252 """Returns the name of a class or class instance"""
253 if getattr(e, "__name__", None) == None:
254 return e.__class__.__name__
255 else:
256 return e.__name__
257
258class OperationStarted(Event):
259 """An operation has begun"""
260 def __init__(self, msg = "Operation Started"):
261 Event.__init__(self)
262 self.msg = msg
263
264class OperationCompleted(Event):
265 """An operation has completed"""
266 def __init__(self, total, msg = "Operation Completed"):
267 Event.__init__(self)
268 self.total = total
269 self.msg = msg
270
271class OperationProgress(Event):
272 """An operation is in progress"""
273 def __init__(self, current, total, msg = "Operation in Progress"):
274 Event.__init__(self)
275 self.current = current
276 self.total = total
277 self.msg = msg + ": %s/%s" % (current, total)
278
279class ConfigParsed(Event):
280 """Configuration Parsing Complete"""
281
282class RecipeEvent(Event):
283 def __init__(self, fn):
284 self.fn = fn
285 Event.__init__(self)
286
287class RecipePreFinalise(RecipeEvent):
288 """ Recipe Parsing Complete but not yet finalised"""
289
290class RecipeParsed(RecipeEvent):
291 """ Recipe Parsing Complete """
292
293class StampUpdate(Event):
294 """Trigger for any adjustment of the stamp files to happen"""
295
296 def __init__(self, targets, stampfns):
297 self._targets = targets
298 self._stampfns = stampfns
299 Event.__init__(self)
300
301 def getStampPrefix(self):
302 return self._stampfns
303
304 def getTargets(self):
305 return self._targets
306
307 stampPrefix = property(getStampPrefix)
308 targets = property(getTargets)
309
310class BuildBase(Event):
311 """Base class for bbmake run events"""
312
313 def __init__(self, n, p, failures = 0):
314 self._name = n
315 self._pkgs = p
316 Event.__init__(self)
317 self._failures = failures
318
319 def getPkgs(self):
320 return self._pkgs
321
322 def setPkgs(self, pkgs):
323 self._pkgs = pkgs
324
325 def getName(self):
326 return self._name
327
328 def setName(self, name):
329 self._name = name
330
331 def getCfg(self):
332 return self.data
333
334 def setCfg(self, cfg):
335 self.data = cfg
336
337 def getFailures(self):
338 """
339 Return the number of failed packages
340 """
341 return self._failures
342
343 pkgs = property(getPkgs, setPkgs, None, "pkgs property")
344 name = property(getName, setName, None, "name property")
345 cfg = property(getCfg, setCfg, None, "cfg property")
346
347
348
349
350
351class BuildStarted(BuildBase, OperationStarted):
352 """bbmake build run started"""
353 def __init__(self, n, p, failures = 0):
354 OperationStarted.__init__(self, "Building Started")
355 BuildBase.__init__(self, n, p, failures)
356
357class BuildCompleted(BuildBase, OperationCompleted):
358 """bbmake build run completed"""
359 def __init__(self, total, n, p, failures = 0):
360 if not failures:
361 OperationCompleted.__init__(self, total, "Building Succeeded")
362 else:
363 OperationCompleted.__init__(self, total, "Building Failed")
364 BuildBase.__init__(self, n, p, failures)
365
366class DiskFull(Event):
367 """Disk full case build aborted"""
368 def __init__(self, dev, type, freespace, mountpoint):
369 Event.__init__(self)
370 self._dev = dev
371 self._type = type
372 self._free = freespace
373 self._mountpoint = mountpoint
374
375class NoProvider(Event):
376 """No Provider for an Event"""
377
378 def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]):
379 Event.__init__(self)
380 self._item = item
381 self._runtime = runtime
382 self._dependees = dependees
383 self._reasons = reasons
384 self._close_matches = close_matches
385
386 def getItem(self):
387 return self._item
388
389 def isRuntime(self):
390 return self._runtime
391
392class MultipleProviders(Event):
393 """Multiple Providers"""
394
395 def __init__(self, item, candidates, runtime = False):
396 Event.__init__(self)
397 self._item = item
398 self._candidates = candidates
399 self._is_runtime = runtime
400
401 def isRuntime(self):
402 """
403 Is this a runtime issue?
404 """
405 return self._is_runtime
406
407 def getItem(self):
408 """
409 The name of the item to be built
410 """
411 return self._item
412
413 def getCandidates(self):
414 """
415 Get the possible Candidates for a PROVIDER.
416 """
417 return self._candidates
418
419class ParseStarted(OperationStarted):
420 """Recipe parsing for the runqueue has begun"""
421 def __init__(self, total):
422 OperationStarted.__init__(self, "Recipe parsing Started")
423 self.total = total
424
425class ParseCompleted(OperationCompleted):
426 """Recipe parsing for the runqueue has completed"""
427 def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
428 OperationCompleted.__init__(self, total, "Recipe parsing Completed")
429 self.cached = cached
430 self.parsed = parsed
431 self.skipped = skipped
432 self.virtuals = virtuals
433 self.masked = masked
434 self.errors = errors
435 self.sofar = cached + parsed
436
437class ParseProgress(OperationProgress):
438 """Recipe parsing progress"""
439 def __init__(self, current, total):
440 OperationProgress.__init__(self, current, total, "Recipe parsing")
441
442
443class CacheLoadStarted(OperationStarted):
444 """Loading of the dependency cache has begun"""
445 def __init__(self, total):
446 OperationStarted.__init__(self, "Loading cache Started")
447 self.total = total
448
449class CacheLoadProgress(OperationProgress):
450 """Cache loading progress"""
451 def __init__(self, current, total):
452 OperationProgress.__init__(self, current, total, "Loading cache")
453
454class CacheLoadCompleted(OperationCompleted):
455 """Cache loading is complete"""
456 def __init__(self, total, num_entries):
457 OperationCompleted.__init__(self, total, "Loading cache Completed")
458 self.num_entries = num_entries
459
460class TreeDataPreparationStarted(OperationStarted):
461 """Tree data preparation started"""
462 def __init__(self):
463 OperationStarted.__init__(self, "Preparing tree data Started")
464
465class TreeDataPreparationProgress(OperationProgress):
466 """Tree data preparation is in progress"""
467 def __init__(self, current, total):
468 OperationProgress.__init__(self, current, total, "Preparing tree data")
469
470class TreeDataPreparationCompleted(OperationCompleted):
471 """Tree data preparation completed"""
472 def __init__(self, total):
473 OperationCompleted.__init__(self, total, "Preparing tree data Completed")
474
475class DepTreeGenerated(Event):
476 """
477 Event when a dependency tree has been generated
478 """
479
480 def __init__(self, depgraph):
481 Event.__init__(self)
482 self._depgraph = depgraph
483
484class TargetsTreeGenerated(Event):
485 """
486 Event when a set of buildable targets has been generated
487 """
488 def __init__(self, model):
489 Event.__init__(self)
490 self._model = model
491
492class FilesMatchingFound(Event):
493 """
494 Event when a list of files matching the supplied pattern has
495 been generated
496 """
497 def __init__(self, pattern, matches):
498 Event.__init__(self)
499 self._pattern = pattern
500 self._matches = matches
501
502class CoreBaseFilesFound(Event):
503 """
504 Event when a list of appropriate config files has been generated
505 """
506 def __init__(self, paths):
507 Event.__init__(self)
508 self._paths = paths
509
510class ConfigFilesFound(Event):
511 """
512 Event when a list of appropriate config files has been generated
513 """
514 def __init__(self, variable, values):
515 Event.__init__(self)
516 self._variable = variable
517 self._values = values
518
519class ConfigFilePathFound(Event):
520 """
521 Event when a path for a config file has been found
522 """
523 def __init__(self, path):
524 Event.__init__(self)
525 self._path = path
526
527class MsgBase(Event):
528 """Base class for messages"""
529
530 def __init__(self, msg):
531 self._message = msg
532 Event.__init__(self)
533
534class MsgDebug(MsgBase):
535 """Debug Message"""
536
537class MsgNote(MsgBase):
538 """Note Message"""
539
540class MsgWarn(MsgBase):
541 """Warning Message"""
542
543class MsgError(MsgBase):
544 """Error Message"""
545
546class MsgFatal(MsgBase):
547 """Fatal Message"""
548
549class MsgPlain(MsgBase):
550 """General output"""
551
552class LogExecTTY(Event):
553 """Send event containing program to spawn on tty of the logger"""
554 def __init__(self, msg, prog, sleep_delay, retries):
555 Event.__init__(self)
556 self.msg = msg
557 self.prog = prog
558 self.sleep_delay = sleep_delay
559 self.retries = retries
560
561class LogHandler(logging.Handler):
562 """Dispatch logging messages as bitbake events"""
563
564 def emit(self, record):
565 if record.exc_info:
566 etype, value, tb = record.exc_info
567 if hasattr(tb, 'tb_next'):
568 tb = list(bb.exceptions.extract_traceback(tb, context=3))
569 record.bb_exc_info = (etype, value, tb)
570 record.exc_info = None
571 fire(record, None)
572
573 def filter(self, record):
574 record.taskpid = worker_pid
575 return True
576
577class RequestPackageInfo(Event):
578 """
579 Event to request package information
580 """
581
582class PackageInfo(Event):
583 """
584 Package information for GUI
585 """
586 def __init__(self, pkginfolist):
587 Event.__init__(self)
588 self._pkginfolist = pkginfolist
589
590class MetadataEvent(Event):
591 """
592 Generic event targeted at OE-Core classes
593 to report information during asynchronous execution
594 """
595 def __init__(self, eventtype, eventdata):
596 Event.__init__(self)
597 self.type = eventtype
598 self._localdata = eventdata
599
600class SanityCheck(Event):
601 """
602 Event to run sanity checks, either raise errors or generate events as return status.
603 """
604 def __init__(self, generateevents = True):
605 Event.__init__(self)
606 self.generateevents = generateevents
607
608class SanityCheckPassed(Event):
609 """
610 Event to indicate sanity check has passed
611 """
612
613class SanityCheckFailed(Event):
614 """
615 Event to indicate sanity check has failed
616 """
617 def __init__(self, msg, network_error=False):
618 Event.__init__(self)
619 self._msg = msg
620 self._network_error = network_error
621
622class NetworkTest(Event):
623 """
624 Event to run network connectivity tests, either raise errors or generate events as return status.
625 """
626 def __init__(self, generateevents = True):
627 Event.__init__(self)
628 self.generateevents = generateevents
629
630class NetworkTestPassed(Event):
631 """
632 Event to indicate network test has passed
633 """
634
635class NetworkTestFailed(Event):
636 """
637 Event to indicate network test has failed
638 """
639
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
new file mode 100644
index 0000000000..f182c8fd62
--- /dev/null
+++ b/bitbake/lib/bb/exceptions.py
@@ -0,0 +1,91 @@
1from __future__ import absolute_import
2import inspect
3import traceback
4import bb.namedtuple_with_abc
5from collections import namedtuple
6
7
8class TracebackEntry(namedtuple.abc):
9 """Pickleable representation of a traceback entry"""
10 _fields = 'filename lineno function args code_context index'
11 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
12
13 def format(self, formatter=None):
14 if not self.code_context:
15 return self._header.format(self) + '\n'
16
17 formatted = [self._header.format(self) + ':\n']
18
19 for lineindex, line in enumerate(self.code_context):
20 if formatter:
21 line = formatter(line)
22
23 if lineindex == self.index:
24 formatted.append(' >%s' % line)
25 else:
26 formatted.append(' %s' % line)
27 return formatted
28
29 def __str__(self):
30 return ''.join(self.format())
31
32def _get_frame_args(frame):
33 """Get the formatted arguments and class (if available) for a frame"""
34 arginfo = inspect.getargvalues(frame)
35
36 try:
37 if not arginfo.args:
38 return '', None
39 # There have been reports from the field of python 2.6 not returning a
40 # namedtuple here but simply a tuple, so fall back gracefully if
41 # args isn't present.
42 except AttributeError:
43 return '', None
44
45 firstarg = arginfo.args[0]
46 if firstarg == 'self':
47 self = arginfo.locals['self']
48 cls = self.__class__.__name__
49
50 arginfo.args.pop(0)
51 del arginfo.locals['self']
52 else:
53 cls = None
54
55 formatted = inspect.formatargvalues(*arginfo)
56 return formatted, cls
57
58def extract_traceback(tb, context=1):
59 frames = inspect.getinnerframes(tb, context)
60 for frame, filename, lineno, function, code_context, index in frames:
61 formatted_args, cls = _get_frame_args(frame)
62 if cls:
63 function = '%s.%s' % (cls, function)
64 yield TracebackEntry(filename, lineno, function, formatted_args,
65 code_context, index)
66
67def format_extracted(extracted, formatter=None, limit=None):
68 if limit:
69 extracted = extracted[-limit:]
70
71 formatted = []
72 for tracebackinfo in extracted:
73 formatted.extend(tracebackinfo.format(formatter))
74 return formatted
75
76
77def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
78 formatted = ['Traceback (most recent call last):\n']
79
80 if hasattr(tb, 'tb_next'):
81 tb = extract_traceback(tb, context)
82
83 formatted.extend(format_extracted(tb, formatter, limit))
84 formatted.extend(traceback.format_exception_only(etype, value))
85 return formatted
86
87def to_string(exc):
88 if isinstance(exc, SystemExit):
89 if not isinstance(exc.code, basestring):
90 return 'Exited with "%d"' % exc.code
91 return str(exc)
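# Illustrative sketch of how the helpers above fit together: format a caught
# exception with source context around each frame.  The failing function is
# hypothetical.
import sys
import bb.exceptions

def divide(a, b):
    return a / b

try:
    divide(1, 0)
except Exception:
    etype, value, tb = sys.exc_info()
    formatted = bb.exceptions.format_exception(etype, value, tb, context=3)
    print(''.join(formatted))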
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..378d41e1cb
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1585 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
43__version__ = "2"
44_checksum_cache = bb.checksum.FileChecksumCache()
45
46logger = logging.getLogger("BitBake.Fetcher")
47
48class BBFetchException(Exception):
49 """Class all fetch exceptions inherit from"""
50 def __init__(self, message):
51 self.msg = message
52 Exception.__init__(self, message)
53
54 def __str__(self):
55 return self.msg
56
57class MalformedUrl(BBFetchException):
58 """Exception raised when encountering an invalid url"""
59 def __init__(self, url, message=''):
60 if message:
61 msg = message
62 else:
63 msg = "The URL: '%s' is invalid and cannot be interpreted" % url
64 self.url = url
65 BBFetchException.__init__(self, msg)
66 self.args = (url,)
67
68class FetchError(BBFetchException):
69 """General fetcher exception when something happens incorrectly"""
70 def __init__(self, message, url = None):
71 if url:
72 msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
73 else:
74 msg = "Fetcher failure: %s" % message
75 self.url = url
76 BBFetchException.__init__(self, msg)
77 self.args = (message, url)
78
79class ChecksumError(FetchError):
80 """Exception when mismatched checksum encountered"""
81 def __init__(self, message, url = None, checksum = None):
82 self.checksum = checksum
83 FetchError.__init__(self, message, url)
84
85class NoChecksumError(FetchError):
86 """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
87
88class UnpackError(BBFetchException):
89 """General fetcher exception when something happens incorrectly when unpacking"""
90 def __init__(self, message, url):
91 msg = "Unpack failure for URL: '%s'. %s" % (url, message)
92 self.url = url
93 BBFetchException.__init__(self, msg)
94 self.args = (message, url)
95
96class NoMethodError(BBFetchException):
97 """Exception raised when there is no method to obtain a supplied url or set of urls"""
98 def __init__(self, url):
99 msg = "Could not find a fetcher which supports the URL: '%s'" % url
100 self.url = url
101 BBFetchException.__init__(self, msg)
102 self.args = (url,)
103
104class MissingParameterError(BBFetchException):
105 """Exception raised when a fetch method is missing a critical parameter in the url"""
106 def __init__(self, missing, url):
107 msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
108 self.url = url
109 self.missing = missing
110 BBFetchException.__init__(self, msg)
111 self.args = (missing, url)
112
113class ParameterError(BBFetchException):
114 """Exception raised when a url cannot be processed due to invalid parameters."""
115 def __init__(self, message, url):
116 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
117 self.url = url
118 BBFetchException.__init__(self, msg)
119 self.args = (message, url)
120
121class NetworkAccess(BBFetchException):
122 """Exception raised when network access is disabled but it is required."""
123 def __init__(self, url, cmd):
124 msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
125 self.url = url
126 self.cmd = cmd
127 BBFetchException.__init__(self, msg)
128 self.args = (url, cmd)
129
130class NonLocalMethod(Exception):
131 def __init__(self):
132 Exception.__init__(self)
133
134
135class URI(object):
136 """
137 A class representing a generic URI, with methods for
138 accessing the URI components, and stringifies to the
139 URI.
140
141 It is constructed by calling it with a URI, or setting
142 the attributes manually:
143
144 uri = URI("http://example.com/")
145
146 uri = URI()
147 uri.scheme = 'http'
148 uri.hostname = 'example.com'
149 uri.path = '/'
150
151 It has the following attributes:
152
153 * scheme (read/write)
154 * userinfo (authentication information) (read/write)
155 * username (read/write)
156 * password (read/write)
157
158 Note, password is deprecated as of RFC 3986.
159
160 * hostname (read/write)
161 * port (read/write)
162 * hostport (read only)
163 "hostname:port", if both are set, otherwise just "hostname"
164 * path (read/write)
165 * path_quoted (read/write)
166 A URI quoted version of path
167 * params (dict) (read/write)
168 * query (dict) (read/write)
169 * relative (bool) (read only)
170 True if this is a "relative URI", (e.g. file:foo.diff)
171
172 It stringifies to the URI itself.
173
174 Some notes about relative URIs: while it's specified that
175 a URI beginning with <scheme>:// should either be directly
176 followed by a hostname or a /, the old URI handling of the
177 fetch2 library did not conform to this. Therefore, this URI
178 class has some kludges to make sure that URIs are parsed in
179 a way conforming to bitbake's current usage. This URI class
180 supports the following:
181
182 file:relative/path.diff (IETF compliant)
183 git:relative/path.git (IETF compliant)
184 git:///absolute/path.git (IETF compliant)
185 file:///absolute/path.diff (IETF compliant)
186
187 file://relative/path.diff (not IETF compliant)
188
189 But it does not support the following:
190
191 file://hostname/absolute/path.diff (would be IETF compliant)
192
193 Note that the last case only applies to a list of
194 "whitelisted" schemes (currently only file://), which requires
195 its URIs to not have a network location.
196 """
197
198 _relative_schemes = ['file', 'git']
199 _netloc_forbidden = ['file']
200
201 def __init__(self, uri=None):
202 self.scheme = ''
203 self.userinfo = ''
204 self.hostname = ''
205 self.port = None
206 self._path = ''
207 self.params = {}
208 self.query = {}
209 self.relative = False
210
211 if not uri:
212 return
213
214 # We hijack the URL parameters, since the way bitbake uses
215 # them is not quite RFC compliant.
216 uri, param_str = (uri.split(";", 1) + [None])[:2]
217
218 urlp = urlparse.urlparse(uri)
219 self.scheme = urlp.scheme
220
221 reparse = 0
222
223 # Coerce urlparse to make URI scheme use netloc
224 if not self.scheme in urlparse.uses_netloc:
225 urlparse.uses_params.append(self.scheme)
226 reparse = 1
227
228 # Make urlparse happy(/ier) by converting local resources
229 # to RFC compliant URL format. E.g.:
230 # file://foo.diff -> file:foo.diff
231 if urlp.scheme in self._netloc_forbidden:
232 uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
233 reparse = 1
234
235 if reparse:
236 urlp = urlparse.urlparse(uri)
237
238 # Identify if the URI is relative or not
239 if urlp.scheme in self._relative_schemes and \
240 re.compile("^\w+:(?!//)").match(uri):
241 self.relative = True
242
243 if not self.relative:
244 self.hostname = urlp.hostname or ''
245 self.port = urlp.port
246
247 self.userinfo += urlp.username or ''
248
249 if urlp.password:
250 self.userinfo += ':%s' % urlp.password
251
252 self.path = urllib.unquote(urlp.path)
253
254 if param_str:
255 self.params = self._param_str_split(param_str, ";")
256 if urlp.query:
257 self.query = self._param_str_split(urlp.query, "&")
258
259 def __str__(self):
260 userinfo = self.userinfo
261 if userinfo:
262 userinfo += '@'
263
264 return "%s:%s%s%s%s%s%s" % (
265 self.scheme,
266 '' if self.relative else '//',
267 userinfo,
268 self.hostport,
269 self.path_quoted,
270 self._query_str(),
271 self._param_str())
272
273 def _param_str(self):
274 return (
275 ''.join([';', self._param_str_join(self.params, ";")])
276 if self.params else '')
277
278 def _query_str(self):
279 return (
280 ''.join(['?', self._param_str_join(self.query, "&")])
281 if self.query else '')
282
283 def _param_str_split(self, string, elmdelim, kvdelim="="):
284 ret = {}
285 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
286 ret[k] = v
287 return ret
288
289 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
290 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
291
292 @property
293 def hostport(self):
294 if not self.port:
295 return self.hostname
296 return "%s:%d" % (self.hostname, self.port)
297
298 @property
299 def path_quoted(self):
300 return urllib.quote(self.path)
301
302 @path_quoted.setter
303 def path_quoted(self, path):
304 self.path = urllib.unquote(path)
305
306 @property
307 def path(self):
308 return self._path
309
310 @path.setter
311 def path(self, path):
312 self._path = path
313
314 if re.compile("^/").match(path):
315 self.relative = False
316 else:
317 self.relative = True
318
319 @property
320 def username(self):
321 if self.userinfo:
322 return (self.userinfo.split(":", 1))[0]
323 return ''
324
325 @username.setter
326 def username(self, username):
327 password = self.password
328 self.userinfo = username
329 if password:
330 self.userinfo += ":%s" % password
331
332 @property
333 def password(self):
334 if self.userinfo and ":" in self.userinfo:
335 return (self.userinfo.split(":", 1))[1]
336 return ''
337
338 @password.setter
339 def password(self, password):
340 self.userinfo = "%s:%s" % (self.username, password)
341
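# Illustrative sketch of the URI class defined above: parse, inspect and
# rebuild a SRC_URI-style URL.  The repository URL is made up.
from bb.fetch2 import URI

uri = URI("git://git.example.com/repo.git;protocol=https;branch=master")
print(uri.scheme)     # "git"
print(uri.hostname)   # "git.example.com"
print(uri.path)       # "/repo.git"
print(uri.params)     # {'protocol': 'https', 'branch': 'master'} (dict order may vary)
uri.port = 2222
print(str(uri))       # git://git.example.com:2222/repo.git;... (parameters re-appended)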
342def decodeurl(url):
343 """Decodes an URL into the tokens (scheme, network location, path,
344 user, password, parameters).
345 """
346
347 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
348 if not m:
349 raise MalformedUrl(url)
350
351 type = m.group('type')
352 location = m.group('location')
353 if not location:
354 raise MalformedUrl(url)
355 user = m.group('user')
356 parm = m.group('parm')
357
358 locidx = location.find('/')
359 if locidx != -1 and type.lower() != 'file':
360 host = location[:locidx]
361 path = location[locidx:]
362 else:
363 host = ""
364 path = location
365 if user:
366 m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
367 if m:
368 user = m.group('user')
369 pswd = m.group('pswd')
370 else:
371 user = ''
372 pswd = ''
373
374 p = {}
375 if parm:
376 for s in parm.split(';'):
377 if s:
378 if not '=' in s:
379 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
380 s1, s2 = s.split('=')
381 p[s1] = s2
382
383 return type, host, urllib.unquote(path), user, pswd, p
384
385def encodeurl(decoded):
386 """Encodes a URL from tokens (scheme, network location, path,
387 user, password, parameters).
388 """
389
390 type, host, path, user, pswd, p = decoded
391
392 if not path:
393 raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
394 if not type:
395 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
396 url = '%s://' % type
397 if user and type != "file":
398 url += "%s" % user
399 if pswd:
400 url += ":%s" % pswd
401 url += "@"
402 if host and type != "file":
403 url += "%s" % host
404 # Standardise path to ensure comparisons work
405 while '//' in path:
406 path = path.replace("//", "/")
407 url += "%s" % urllib.quote(path)
408 if p:
409 for parm in p:
410 url += ";%s=%s" % (parm, p[parm])
411
412 return url
413
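# Illustrative sketch of decodeurl()/encodeurl() above: a URL is split into
# (type, host, path, user, password, params) and can be reassembled.  The
# example URL is made up.
from bb.fetch2 import decodeurl, encodeurl

decoded = decodeurl("http://example.com/src/foo-1.0.tar.gz;name=foo")
# ('http', 'example.com', '/src/foo-1.0.tar.gz', '', '', {'name': 'foo'})
print(decoded)
print(encodeurl(decoded))   # http://example.com/src/foo-1.0.tar.gz;name=foo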
414def uri_replace(ud, uri_find, uri_replace, replacements, d):
415 if not ud.url or not uri_find or not uri_replace:
416 logger.error("uri_replace: passed an undefined value, not replacing")
417 return None
418 uri_decoded = list(decodeurl(ud.url))
419 uri_find_decoded = list(decodeurl(uri_find))
420 uri_replace_decoded = list(decodeurl(uri_replace))
421 logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
422 result_decoded = ['', '', '', '', '', {}]
423 for loc, i in enumerate(uri_find_decoded):
424 result_decoded[loc] = uri_decoded[loc]
425 regexp = i
426 if loc == 0 and regexp and not regexp.endswith("$"):
427 # Leaving the type unanchored can mean "https" matching "file" can become "files"
428 # which is clearly undesirable.
429 regexp += "$"
430 if loc == 5:
431 # Handle URL parameters
432 if i:
433 # Any specified URL parameters must match
434 for k in uri_replace_decoded[loc]:
435 if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
436 return None
437 # Overwrite any specified replacement parameters
438 for k in uri_replace_decoded[loc]:
439 for l in replacements:
440 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
441 result_decoded[loc][k] = uri_replace_decoded[loc][k]
442 elif (re.match(regexp, uri_decoded[loc])):
443 if not uri_replace_decoded[loc]:
444 result_decoded[loc] = ""
445 else:
446 for k in replacements:
447 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
448 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
449 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
450 if loc == 2:
451 # Handle path manipulations
452 basename = None
453 if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
454 # If the source and destination url types differ, must be a mirrortarball mapping
455 basename = os.path.basename(ud.mirrortarball)
456 # Kill parameters, they make no sense for mirror tarballs
457 uri_decoded[5] = {}
458 elif ud.localpath and ud.method.supports_checksum(ud):
459 basename = os.path.basename(ud.localpath)
460 if basename and not result_decoded[loc].endswith(basename):
461 result_decoded[loc] = os.path.join(result_decoded[loc], basename)
462 else:
463 return None
464 result = encodeurl(result_decoded)
465 if result == ud.url:
466 return None
467 logger.debug(2, "For url %s returning %s" % (ud.url, result))
468 return result
469
470methods = []
471urldata_cache = {}
472saved_headrevs = {}
473
474def fetcher_init(d):
475 """
476 Called to initialize the fetchers once the configuration data is known.
477 Calls before this must not hit the cache.
478 """
479 # When to drop SCM head revisions controlled by user policy
480 srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
481 if srcrev_policy == "cache":
482 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
483 elif srcrev_policy == "clear":
484 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
485 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
486 try:
487 bb.fetch2.saved_headrevs = revs.items()
488 except:
489 pass
490 revs.clear()
491 else:
492 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
493
494 _checksum_cache.init_cache(d)
495
496 for m in methods:
497 if hasattr(m, "init"):
498 m.init(d)
499
500def fetcher_parse_save(d):
501 _checksum_cache.save_extras(d)
502
503def fetcher_parse_done(d):
504 _checksum_cache.save_merge(d)
505
506def fetcher_compare_revisions(d):
507 """
508 Compare the revisions in the persistent cache with current values and
509 return true/false on whether they've changed.
510 """
511
512 data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
513 data2 = bb.fetch2.saved_headrevs
514
515 changed = False
516 for key in data:
517 if key not in data2 or data2[key] != data[key]:
518 logger.debug(1, "%s changed", key)
519 changed = True
520 return True
521 else:
522 logger.debug(2, "%s did not change", key)
523 return False
524
525def mirror_from_string(data):
526 return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
527
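# Illustrative sketch of the MIRRORS/PREMIRRORS format consumed by
# mirror_from_string() above: whitespace-separated (regex, replacement)
# pairs separated by (possibly escaped) newlines.  The mirror locations are
# made up.
from bb.fetch2 import mirror_from_string

mirrors = mirror_from_string(
    "git://.*/.* http://downloads.example.com/mirror/ \\n"
    "http://.*/.* file:///srv/source-mirror/")
# -> [['git://.*/.*', 'http://downloads.example.com/mirror/'],
#     ['http://.*/.*', 'file:///srv/source-mirror/']]
print(mirrors)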
528def verify_checksum(ud, d):
529 """
530 verify the MD5 and SHA256 checksum for downloaded src
531
532 Raises a FetchError if one or both of the SRC_URI checksums do not match
533 the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
534 checksums specified.
535
536 """
537
538 if not ud.method.supports_checksum(ud):
539 return
540
541 md5data = bb.utils.md5_file(ud.localpath)
542 sha256data = bb.utils.sha256_file(ud.localpath)
543
544 if ud.method.recommends_checksum(ud):
545 # If strict checking enabled and neither sum defined, raise error
546 strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
547 if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
548 logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
549 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
550 (ud.localpath, ud.md5_name, md5data,
551 ud.sha256_name, sha256data))
552 raise NoChecksumError('Missing SRC_URI checksum', ud.url)
553
554 # Log missing sums so user can more easily add them
555 if not ud.md5_expected:
556 logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
557 'SRC_URI[%s] = "%s"',
558 ud.localpath, ud.md5_name, md5data)
559
560 if not ud.sha256_expected:
561 logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
562 'SRC_URI[%s] = "%s"',
563 ud.localpath, ud.sha256_name, sha256data)
564
565 md5mismatch = False
566 sha256mismatch = False
567
568 if ud.md5_expected != md5data:
569 md5mismatch = True
570
571 if ud.sha256_expected != sha256data:
572 sha256mismatch = True
573
574 # We want to alert the user if a checksum is defined in the recipe but
575 # it does not match.
576 msg = ""
577 mismatch = False
578 if md5mismatch and ud.md5_expected:
579 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
580 mismatch = True
581
582 if sha256mismatch and ud.sha256_expected:
583 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
584 mismatch = True
585
586 if mismatch:
587 msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
588
589 if len(msg):
590 raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
591
592
593def update_stamp(ud, d):
594 """
595 donestamp is a stamp file indicating that the whole fetch is done;
596 this function updates the stamp after verifying the checksum
597 """
598 if os.path.exists(ud.donestamp):
599 # Touch the done stamp file to show active use of the download
600 try:
601 os.utime(ud.donestamp, None)
602 except:
603 # Errors aren't fatal here
604 pass
605 else:
606 verify_checksum(ud, d)
607 open(ud.donestamp, 'w').close()
608
609def subprocess_setup():
610 # Python installs a SIGPIPE handler by default. This is usually not what
611 # non-Python subprocesses expect.
612 # SIGPIPE errors are known issues with gzip/bash
613 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
614
615def get_autorev(d):
616 # in the autorev case, do not cache the srcrev (unless the SRCREV policy is "cache")
617 if d.getVar('BB_SRCREV_POLICY', True) != "cache":
618 d.setVar('__BB_DONT_CACHE', '1')
619 return "AUTOINC"
620
621def get_srcrev(d):
622 """
623 Return the version string for the current package
624 (usually to be used as PV)
625 Most packages usually only have one SCM so we just pass on the call.
626 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
627 have been set.
628 """
629
630 scms = []
631 fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
632 urldata = fetcher.ud
633 for u in urldata:
634 if urldata[u].method.supports_srcrev():
635 scms.append(u)
636
637 if len(scms) == 0:
638 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
639
640 if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
641 autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
642 if len(rev) > 10:
643 rev = rev[:10]
644 if autoinc:
645 return "AUTOINC+" + rev
646 return rev
647
648 #
649 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
650 #
651 format = d.getVar('SRCREV_FORMAT', True)
652 if not format:
653 raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
654
655 seenautoinc = False
656 for scm in scms:
657 ud = urldata[scm]
658 for name in ud.names:
659 autoinc, rev = ud.method.sortable_revision(ud, d, name)
660 seenautoinc = seenautoinc or autoinc
661 if len(rev) > 10:
662 rev = rev[:10]
663 format = format.replace(name, rev)
664 if seenautoinc:
665 format = "AUTOINC+" + format
666
667 return format
668
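# Illustrative sketch of the multi-SCM branch of get_srcrev() above: each
# ";name=..." from SRC_URI is substituted into SRCREV_FORMAT with its
# (truncated) revision.  The names and revisions below are hypothetical.
revisions = {"machine": "a1b2c3d4e5f6a7b8", "meta": "0123456789abcdef"}
fmt = "machine_meta"                     # value of SRCREV_FORMAT
for name, rev in revisions.items():
    fmt = fmt.replace(name, rev[:10])
print(fmt)                               # a1b2c3d4e5_0123456789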
669def localpath(url, d):
670 fetcher = bb.fetch2.Fetch([url], d)
671 return fetcher.localpath(url)
672
673def runfetchcmd(cmd, d, quiet = False, cleanup = []):
674 """
675 Run cmd returning the command output
676 Raise an error if interrupted or cmd fails
677 Optionally echo command output to stdout
678 Optionally remove the files/directories listed in cleanup upon failure
679 """
680
681 # Need to export PATH as binary could be in metadata paths
682 # rather than host provided
683 # Also include some other variables.
684 # FIXME: Should really include all exported variables?
685 exportvars = ['HOME', 'PATH',
686 'HTTP_PROXY', 'http_proxy',
687 'HTTPS_PROXY', 'https_proxy',
688 'FTP_PROXY', 'ftp_proxy',
689 'FTPS_PROXY', 'ftps_proxy',
690 'NO_PROXY', 'no_proxy',
691 'ALL_PROXY', 'all_proxy',
692 'GIT_PROXY_COMMAND',
693 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
694 'SOCKS5_USER', 'SOCKS5_PASSWD']
695
696 for var in exportvars:
697 val = d.getVar(var, True)
698 if val:
699 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
700
701 logger.debug(1, "Running %s", cmd)
702
703 success = False
704 error_message = ""
705
706 try:
707 (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
708 success = True
709 except bb.process.NotFoundError as e:
710 error_message = "Fetch command %s" % (e.command)
711 except bb.process.ExecutionError as e:
712 if e.stdout:
713 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
714 elif e.stderr:
715 output = "output:\n%s" % e.stderr
716 else:
717 output = "no output"
718 error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
719 except bb.process.CmdError as e:
720 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
721 if not success:
722 for f in cleanup:
723 try:
724 bb.utils.remove(f, True)
725 except OSError:
726 pass
727
728 raise FetchError(error_message)
729
730 return output
731
732def check_network_access(d, info = "", url = None):
733 """
734 log remote network access, and error if BB_NO_NETWORK is set
735 """
736 if d.getVar("BB_NO_NETWORK", True) == "1":
737 raise NetworkAccess(url, info)
738 else:
739 logger.debug(1, "Fetcher accessed the network with the command %s" % info)
740
741def build_mirroruris(origud, mirrors, ld):
742 uris = []
743 uds = []
744
745 replacements = {}
746 replacements["TYPE"] = origud.type
747 replacements["HOST"] = origud.host
748 replacements["PATH"] = origud.path
749 replacements["BASENAME"] = origud.path.split("/")[-1]
750 replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
751
752 def adduri(ud, uris, uds):
753 for line in mirrors:
754 try:
755 (find, replace) = line
756 except ValueError:
757 continue
758 newuri = uri_replace(ud, find, replace, replacements, ld)
759 if not newuri or newuri in uris or newuri == origud.url:
760 continue
761 try:
762 newud = FetchData(newuri, ld)
763 newud.setup_localpath(ld)
764 except bb.fetch2.BBFetchException as e:
765 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
766 logger.debug(1, str(e))
767 try:
768 ud.method.clean(ud, ld)
769 except UnboundLocalError:
770 pass
771 continue
772 uris.append(newuri)
773 uds.append(newud)
774
775 adduri(newud, uris, uds)
776
777 adduri(origud, uris, uds)
778
779 return uris, uds
780
781def rename_bad_checksum(ud, suffix):
782 """
783 Renames files to have suffix from parameter
784 """
785
786 if ud.localpath is None:
787 return
788
789 new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
790 bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
791 bb.utils.movefile(ud.localpath, new_localpath)
792
793
794def try_mirror_url(origud, ud, ld, check = False):
795 # Return of None or a value means we're finished
796 # False means try another url
797 try:
798 if check:
799 found = ud.method.checkstatus(ud, ld)
800 if found:
801 return found
802 return False
803
804 os.chdir(ld.getVar("DL_DIR", True))
805
806 if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
807 ud.method.download(ud, ld)
808 if hasattr(ud.method,"build_mirror_data"):
809 ud.method.build_mirror_data(ud, ld)
810
811 if not ud.localpath or not os.path.exists(ud.localpath):
812 return False
813
814 if ud.localpath == origud.localpath:
815 return ud.localpath
816
817 # We may be obtaining a mirror tarball which needs further processing by the real fetcher
818 # If that tarball is a local file:// we need to provide a symlink to it
819 dldir = ld.getVar("DL_DIR", True)
820 if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
821 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
822 bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
823 open(ud.donestamp, 'w').close()
824 dest = os.path.join(dldir, os.path.basename(ud.localpath))
825 if not os.path.exists(dest):
826 os.symlink(ud.localpath, dest)
827 if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
828 origud.method.download(origud, ld)
829 if hasattr(origud.method,"build_mirror_data"):
830 origud.method.build_mirror_data(origud, ld)
831 return ud.localpath
832 # Otherwise the result is a local file:// and we symlink to it
833 if not os.path.exists(origud.localpath):
834 if os.path.islink(origud.localpath):
835 # Broken symbolic link
836 os.unlink(origud.localpath)
837
838 os.symlink(ud.localpath, origud.localpath)
839 update_stamp(origud, ld)
840 return ud.localpath
841
842 except bb.fetch2.NetworkAccess:
843 raise
844
845 except bb.fetch2.BBFetchException as e:
846 if isinstance(e, ChecksumError):
847 logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
848 logger.warn(str(e))
849 rename_bad_checksum(ud, e.checksum)
850 elif isinstance(e, NoChecksumError):
851 raise
852 else:
853 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
854 logger.debug(1, str(e))
855 try:
856 ud.method.clean(ud, ld)
857 except UnboundLocalError:
858 pass
859 return False
860
861def try_mirrors(d, origud, mirrors, check = False):
862 """
863 Try to use a mirrored version of the sources.
864 This method will be automatically called before the fetchers go.
865
866 d is a bb.data instance
867 origud is the FetchData for the original uri we're trying to download
868 mirrors is the list of mirrors we're going to try
869 """
870 ld = d.createCopy()
871
872 uris, uds = build_mirroruris(origud, mirrors, ld)
873
874 for index, uri in enumerate(uris):
875 ret = try_mirror_url(origud, uds[index], ld, check)
876 if ret != False:
877 return ret
878 return None
879
880def srcrev_internal_helper(ud, d, name):
881 """
882 Return:
883 a) a source revision if specified
884 b) latest revision if SRCREV="AUTOINC"
885 c) None if not specified
886 """
887
888 srcrev = None
889 pn = d.getVar("PN", True)
890 attempts = []
891 if name != '' and pn:
892 attempts.append("SRCREV_%s_pn-%s" % (name, pn))
893 if name != '':
894 attempts.append("SRCREV_%s" % name)
895 if pn:
896 attempts.append("SRCREV_pn-%s" % pn)
897 attempts.append("SRCREV")
898
899 for a in attempts:
900 srcrev = d.getVar(a, True)
901 if srcrev and srcrev != "INVALID":
902 break
903
904 if 'rev' in ud.parm and 'tag' in ud.parm:
905 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
906
907 if 'rev' in ud.parm or 'tag' in ud.parm:
908 if 'rev' in ud.parm:
909 parmrev = ud.parm['rev']
910 else:
911 parmrev = ud.parm['tag']
912 if srcrev == "INVALID" or not srcrev:
913 return parmrev
914 if srcrev != parmrev:
915 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
916 return parmrev
917
918 if srcrev == "INVALID" or not srcrev:
919 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
920 if srcrev == "AUTOINC":
921 srcrev = ud.method.latest_revision(ud, d, name)
922
923 return srcrev
924
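# Illustrative sketch of the lookup order implemented above for a url with
# ";name=rt" in a recipe whose PN is "linux-yocto" (values are hypothetical):
#     SRCREV_rt_pn-linux-yocto -> SRCREV_rt -> SRCREV_pn-linux-yocto -> SRCREV
# The first value that is set and not "INVALID" wins; a ;rev= or ;tag= URL
# parameter, when present, is checked against this value, and "AUTOINC" is
# resolved to the latest upstream revision via the fetcher's latest_revision().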
925def get_checksum_file_list(d):
926 """ Get a list of the files to checksum in SRC_URI
927
928 Returns the resolved local paths of all local file entries in
929 SRC_URI as a space-separated string
930 """
931 fetch = Fetch([], d, cache = False, localonly = True)
932
933 dl_dir = d.getVar('DL_DIR', True)
934 filelist = []
935 for u in fetch.urls:
936 ud = fetch.ud[u]
937
938 if ud and isinstance(ud.method, local.Local):
939 paths = ud.method.localpaths(ud, d)
940 for f in paths:
941 pth = ud.decodedurl
942 if '*' in pth:
943 f = os.path.join(os.path.abspath(f), pth)
944 if f.startswith(dl_dir):
945 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
946 if os.path.exists(f):
947 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
948 else:
949 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
950 filelist.append(f + ":" + str(os.path.exists(f)))
951
952 return " ".join(filelist)
953
954def get_file_checksums(filelist, pn):
955 """Get a list of the checksums for a list of local files
956
957 Returns the checksums for a list of local files, caching the results as
958 it proceeds
959
960 """
961
962 def checksum_file(f):
963 try:
964 checksum = _checksum_cache.get_checksum(f)
965 except OSError as e:
966 bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
967 return None
968 return checksum
969
970 def checksum_dir(pth):
971 # Handle directories recursively
972 dirchecksums = []
973 for root, dirs, files in os.walk(pth):
974 for name in files:
975 fullpth = os.path.join(root, name)
976 checksum = checksum_file(fullpth)
977 if checksum:
978 dirchecksums.append((fullpth, checksum))
979 return dirchecksums
980
981 checksums = []
982 for pth in filelist.split():
983 exist = pth.split(":")[1]
984 if exist == "False":
985 continue
986 pth = pth.split(":")[0]
987 if '*' in pth:
988 # Handle globs
989 for f in glob.glob(pth):
990 if os.path.isdir(f):
991 checksums.extend(checksum_dir(f))
992 else:
993 checksum = checksum_file(f)
994 checksums.append((f, checksum))
995 elif os.path.isdir(pth):
996 checksums.extend(checksum_dir(pth))
997 else:
998 checksum = checksum_file(pth)
999 checksums.append((pth, checksum))
1000
1001 checksums.sort(key=operator.itemgetter(1))
1002 return checksums
1003
1004
1005class FetchData(object):
1006 """
1007 A class which represents the fetcher state for a given URI.
1008 """
1009 def __init__(self, url, d, localonly = False):
1010 # localpath is the location of a downloaded result. If not set, the file is local.
1011 self.donestamp = None
1012 self.localfile = ""
1013 self.localpath = None
1014 self.lockfile = None
1015 self.mirrortarball = None
1016 self.basename = None
1017 self.basepath = None
1018 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
1019 self.date = self.getSRCDate(d)
1020 self.url = url
1021 if not self.user and "user" in self.parm:
1022 self.user = self.parm["user"]
1023 if not self.pswd and "pswd" in self.parm:
1024 self.pswd = self.parm["pswd"]
1025 self.setup = False
1026
1027 if "name" in self.parm:
1028 self.md5_name = "%s.md5sum" % self.parm["name"]
1029 self.sha256_name = "%s.sha256sum" % self.parm["name"]
1030 else:
1031 self.md5_name = "md5sum"
1032 self.sha256_name = "sha256sum"
1033 if self.md5_name in self.parm:
1034 self.md5_expected = self.parm[self.md5_name]
1035 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1036 self.md5_expected = None
1037 else:
1038 self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
1039 if self.sha256_name in self.parm:
1040 self.sha256_expected = self.parm[self.sha256_name]
1041 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1042 self.sha256_expected = None
1043 else:
1044 self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
1045
1046 self.names = self.parm.get("name",'default').split(',')
1047
1048 self.method = None
1049 for m in methods:
1050 if m.supports(self, d):
1051 self.method = m
1052 break
1053
1054 if not self.method:
1055 raise NoMethodError(url)
1056
1057 if localonly and not isinstance(self.method, local.Local):
1058 raise NonLocalMethod()
1059
1060 if self.parm.get("proto", None) and "protocol" not in self.parm:
1061 logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
1062 self.parm["protocol"] = self.parm.get("proto", None)
1063
1064 if hasattr(self.method, "urldata_init"):
1065 self.method.urldata_init(self, d)
1066
1067 if "localpath" in self.parm:
1068 # if user sets localpath for file, use it instead.
1069 self.localpath = self.parm["localpath"]
1070 self.basename = os.path.basename(self.localpath)
1071 elif self.localfile:
1072 self.localpath = self.method.localpath(self, d)
1073
1074 dldir = d.getVar("DL_DIR", True)
1075 # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
1076 if self.localpath and self.localpath.startswith(dldir):
1077 basepath = self.localpath
1078 elif self.localpath:
1079 basepath = dldir + os.sep + os.path.basename(self.localpath)
1080 else:
1081 basepath = dldir + os.sep + (self.basepath or self.basename)
1082 self.donestamp = basepath + '.done'
1083 self.lockfile = basepath + '.lock'
1084
1085 def setup_revisons(self, d):
1086 self.revisions = {}
1087 for name in self.names:
1088 self.revisions[name] = srcrev_internal_helper(self, d, name)
1089
1090 # add compatibility code for the case where no name is specified
1091 if len(self.names) == 1:
1092 self.revision = self.revisions[self.names[0]]
1093
1094 def setup_localpath(self, d):
1095 if not self.localpath:
1096 self.localpath = self.method.localpath(self, d)
1097
1098 def getSRCDate(self, d):
1099 """
1100 Return the SRC Date for the component
1101
1102 d the bb.data instance
1103 """
1104 if "srcdate" in self.parm:
1105 return self.parm['srcdate']
1106
1107 pn = d.getVar("PN", True)
1108
1109 if pn:
1110 return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1111
1112 return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1113
1114class FetchMethod(object):
1115 """Base class for 'fetch'ing data"""
1116
1117 def __init__(self, urls = []):
1118 self.urls = []
1119
1120 def supports(self, urldata, d):
1121 """
1122 Check to see if this fetch class supports a given url.
1123 """
1124 return 0
1125
1126 def localpath(self, urldata, d):
1127 """
1128 Return the local filename of a given url assuming a successful fetch.
1129 Can also set up variables in urldata for use in download() (saving code
1130 duplication and duplicate code execution)
1131 """
1132 return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
1133
1134 def supports_checksum(self, urldata):
1135 """
1136 Is localpath something that can be represented by a checksum?
1137 """
1138
1139 # We cannot compute checksums for directories
1140 if os.path.isdir(urldata.localpath):
1141 return False
1142 if urldata.localpath.find("*") != -1:
1143 return False
1144
1145 return True
1146
1147 def recommends_checksum(self, urldata):
1148 """
1149 Is this a backend for which checksums are recommended (i.e. should warnings
1150 be displayed if there is no checksum)?
1151 """
1152 return False
1153
1154 def _strip_leading_slashes(self, relpath):
1155 """
1156 Remove leading slash as os.path.join can't cope
1157 """
1158 while os.path.isabs(relpath):
1159 relpath = relpath[1:]
1160 return relpath
1161
1162 def setUrls(self, urls):
1163 self.__urls = urls
1164
1165 def getUrls(self):
1166 return self.__urls
1167
1168 urls = property(getUrls, setUrls, None, "Urls property")
1169
1170 def need_update(self, ud, d):
1171 """
1172 Force a fetch, even if localpath exists?
1173 """
1174 if os.path.exists(ud.localpath):
1175 return False
1176 return True
1177
1178 def supports_srcrev(self):
1179 """
1180 The fetcher supports auto source revisions (SRCREV)
1181 """
1182 return False
1183
1184 def download(self, urldata, d):
1185 """
1186 Fetch urls
1187 Assumes localpath was called first
1188 """
1189 raise NoMethodError(urldata.url)
1190
1191 def unpack(self, urldata, rootdir, data):
1192 iterate = False
1193 file = urldata.localpath
1194
1195 try:
1196 unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1197 except ValueError as exc:
1198 bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1199 (file, urldata.parm.get('unpack')))
1200
1201 dots = file.split(".")
1202 if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
1203 efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
1204 else:
1205 efile = file
1206 cmd = None
1207
1208 if unpack:
1209 if file.endswith('.tar'):
1210 cmd = 'tar x --no-same-owner -f %s' % file
1211 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1212 cmd = 'tar xz --no-same-owner -f %s' % file
1213 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1214 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
1215 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1216 cmd = 'gzip -dc %s > %s' % (file, efile)
1217 elif file.endswith('.bz2'):
1218 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1219 elif file.endswith('.tar.xz'):
1220 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
1221 elif file.endswith('.xz'):
1222 cmd = 'xz -dc %s > %s' % (file, efile)
1223 elif file.endswith('.zip') or file.endswith('.jar'):
1224 try:
1225 dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1226 except ValueError as exc:
1227 bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1228 (file, urldata.parm.get('dos')))
1229 cmd = 'unzip -q -o'
1230 if dos:
1231 cmd = '%s -a' % cmd
1232 cmd = "%s '%s'" % (cmd, file)
1233 elif file.endswith('.rpm') or file.endswith('.srpm'):
1234 if 'extract' in urldata.parm:
1235 unpack_file = urldata.parm.get('extract')
1236 cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1237 iterate = True
1238 iterate_file = unpack_file
1239 else:
1240 cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1241 elif file.endswith('.deb') or file.endswith('.ipk'):
1242 cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
1243
1244 if not unpack or not cmd:
1245 # If file == dest, then avoid any copies, as we already put the file into dest!
1246 dest = os.path.join(rootdir, os.path.basename(file))
1247 if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1248 if os.path.isdir(file):
1249 # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
1250 basepath = getattr(urldata, "basepath", None)
1251 destdir = "."
1252 if basepath and basepath.endswith("/"):
1253 basepath = basepath.rstrip("/")
1254 elif basepath:
1255 basepath = os.path.dirname(basepath)
1256 if basepath and basepath.find("/") != -1:
1257 destdir = basepath[:basepath.rfind('/')]
1258 destdir = destdir.strip('/')
1259 if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
1260 os.makedirs("%s/%s" % (rootdir, destdir))
1261 cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir)
1262 #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
1263 else:
1264 # The "destdir" handling was specifically done for FILESPATH
1265 # items. So, only do so for file:// entries.
1266 if urldata.type == "file" and urldata.path.find("/") != -1:
1267 destdir = urldata.path.rsplit("/", 1)[0]
1268 if urldata.parm.get('subdir') != None:
1269 destdir = urldata.parm.get('subdir') + "/" + destdir
1270 else:
1271 if urldata.parm.get('subdir') != None:
1272 destdir = urldata.parm.get('subdir')
1273 else:
1274 destdir = "."
1275 bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
1276 cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)
1277
1278 if not cmd:
1279 return
1280
1281 # Change to subdir before executing command
1282 save_cwd = os.getcwd()
1283 os.chdir(rootdir)
1284 if 'subdir' in urldata.parm:
1285 newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
1286 bb.utils.mkdirhier(newdir)
1287 os.chdir(newdir)
1288
1289 path = data.getVar('PATH', True)
1290 if path:
1291 cmd = "PATH=\"%s\" %s" % (path, cmd)
1292 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
1293 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
1294
1295 os.chdir(save_cwd)
1296
1297 if ret != 0:
1298 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1299
1300 if iterate is True:
1301 iterate_urldata = urldata
1302 iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1303 self.unpack(urldata, rootdir, data)
1304
1305 return
1306
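    # SRC_URI parameters recognised by unpack() above (all optional):
    #   unpack=0     skip extraction and just copy the fetched file into place
    #   dos=1        pass -a to unzip so text-file line endings are converted
    #   subdir=DIR   change into DIR below rootdir before unpacking/copying
    #   extract=FILE for rpm/srpm archives, extract only FILE via cpio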
1307 def clean(self, urldata, d):
1308 """
1309 Clean any existing full or partial download
1310 """
1311 bb.utils.remove(urldata.localpath)
1312
1313 def try_premirror(self, urldata, d):
1314 """
1315 Should premirrors be used?
1316 """
1317 return True
1318
1319 def checkstatus(self, urldata, d):
1320 """
1321 Check the status of a URL
1322 Assumes localpath was called first
1323 """
1324 logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
1325 return True
1326
1327 def latest_revision(self, ud, d, name):
1328 """
1329 Look in the cache for the latest revision, if not present ask the SCM.
1330 """
1331 if not hasattr(self, "_latest_revision"):
1332 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1333
1334 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1335 key = self.generate_revision_key(ud, d, name)
1336 try:
1337 return revs[key]
1338 except KeyError:
1339 revs[key] = rev = self._latest_revision(ud, d, name)
1340 return rev
1341
1342 def sortable_revision(self, ud, d, name):
1343 latest_rev = self._build_revision(ud, d, name)
1344 return True, str(latest_rev)
1345
1346 def generate_revision_key(self, ud, d, name):
1347 key = self._revision_key(ud, d, name)
1348 return "%s-%s" % (key, d.getVar("PN", True) or "")
1349
1350class Fetch(object):
1351 def __init__(self, urls, d, cache = True, localonly = False):
1352 if localonly and cache:
1353 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1354
1355 if len(urls) == 0:
1356 urls = d.getVar("SRC_URI", True).split()
1357 self.urls = urls
1358 self.d = d
1359 self.ud = {}
1360
1361 fn = d.getVar('FILE', True)
1362 if cache and fn and fn in urldata_cache:
1363 self.ud = urldata_cache[fn]
1364
1365 for url in urls:
1366 if url not in self.ud:
1367 try:
1368 self.ud[url] = FetchData(url, d, localonly)
1369 except NonLocalMethod:
1370 if localonly:
1371 self.ud[url] = None
1372 pass
1373
1374 if fn and cache:
1375 urldata_cache[fn] = self.ud
1376
1377 def localpath(self, url):
1378 if url not in self.urls:
1379 self.ud[url] = FetchData(url, self.d)
1380
1381 self.ud[url].setup_localpath(self.d)
1382 return self.d.expand(self.ud[url].localpath)
1383
1384 def localpaths(self):
1385 """
1386 Return a list of the local filenames, assuming successful fetch
1387 """
1388 local = []
1389
1390 for u in self.urls:
1391 ud = self.ud[u]
1392 ud.setup_localpath(self.d)
1393 local.append(ud.localpath)
1394
1395 return local
1396
1397 def download(self, urls = []):
1398 """
1399 Fetch all urls
1400 """
1401 if len(urls) == 0:
1402 urls = self.urls
1403
1404 network = self.d.getVar("BB_NO_NETWORK", True)
1405 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
1406
1407 for u in urls:
1408 ud = self.ud[u]
1409 ud.setup_localpath(self.d)
1410 m = ud.method
1411 localpath = ""
1412
1413 lf = bb.utils.lockfile(ud.lockfile)
1414
1415 try:
1416 self.d.setVar("BB_NO_NETWORK", network)
1417
1418 if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
1419 localpath = ud.localpath
1420 elif m.try_premirror(ud, self.d):
1421 logger.debug(1, "Trying PREMIRRORS")
1422 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1423 localpath = try_mirrors(self.d, ud, mirrors, False)
1424
1425 if premirroronly:
1426 self.d.setVar("BB_NO_NETWORK", "1")
1427
1428 os.chdir(self.d.getVar("DL_DIR", True))
1429
1430 firsterr = None
1431 if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
1432 try:
1433 logger.debug(1, "Trying Upstream")
1434 m.download(ud, self.d)
1435 if hasattr(m, "build_mirror_data"):
1436 m.build_mirror_data(ud, self.d)
1437 localpath = ud.localpath
1438 # verify the checksum early so that, if it does not match, the
1439 # fetcher still has a chance to fetch from a mirror
1440 update_stamp(ud, self.d)
1441
1442 except bb.fetch2.NetworkAccess:
1443 raise
1444
1445 except BBFetchException as e:
1446 if isinstance(e, ChecksumError):
1447 logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
1448 logger.debug(1, str(e))
1449 rename_bad_checksum(ud, e.checksum)
1450 elif isinstance(e, NoChecksumError):
1451 raise
1452 else:
1453 logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
1454 logger.debug(1, str(e))
1455 firsterr = e
1456 # Remove any incomplete fetch
1457 m.clean(ud, self.d)
1458 logger.debug(1, "Trying MIRRORS")
1459 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1460 localpath = try_mirrors(self.d, ud, mirrors)
1461
1462 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
1463 if firsterr:
1464 logger.error(str(firsterr))
1465 raise FetchError("Unable to fetch URL from any source.", u)
1466
1467 update_stamp(ud, self.d)
1468
1469 except BBFetchException as e:
1470 if isinstance(e, ChecksumError):
1471 logger.error("Checksum failure fetching %s" % u)
1472 raise
1473
1474 finally:
1475 bb.utils.unlockfile(lf)
1476
1477 def checkstatus(self, urls = []):
1478 """
1479 Check all urls exist upstream
1480 """
1481
1482 if len(urls) == 0:
1483 urls = self.urls
1484
1485 for u in urls:
1486 ud = self.ud[u]
1487 ud.setup_localpath(self.d)
1488 m = ud.method
1489 logger.debug(1, "Testing URL %s", u)
1490 # First try checking uri, u, from PREMIRRORS
1491 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1492 ret = try_mirrors(self.d, ud, mirrors, True)
1493 if not ret:
1494 # Next try checking from the original uri, u
1495 try:
1496 ret = m.checkstatus(ud, self.d)
1497 except:
1498 # Finally, try checking uri, u, from MIRRORS
1499 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1500 ret = try_mirrors(self.d, ud, mirrors, True)
1501
1502 if not ret:
1503 raise FetchError("URL %s doesn't work" % u, u)
1504
1505 def unpack(self, root, urls = []):
1506 """
1507 Unpack all urls into the given root directory
1508 """
1509
1510 if len(urls) == 0:
1511 urls = self.urls
1512
1513 for u in urls:
1514 ud = self.ud[u]
1515 ud.setup_localpath(self.d)
1516
1517 if self.d.expand(ud.localpath) is None:
1518 continue
1519
1520 if ud.lockfile:
1521 lf = bb.utils.lockfile(ud.lockfile)
1522
1523 ud.method.unpack(ud, root, self.d)
1524
1525 if ud.lockfile:
1526 bb.utils.unlockfile(lf)
1527
1528 def clean(self, urls = []):
1529 """
1530 Clean files that the fetcher gets or places
1531 """
1532
1533 if len(urls) == 0:
1534 urls = self.urls
1535
1536 for url in urls:
1537 if url not in self.ud:
1538 self.ud[url] = FetchData(url, self.d)
1539 ud = self.ud[url]
1540 ud.setup_localpath(self.d)
1541
1542 if not ud.localfile and ud.localpath is None:
1543 continue
1544
1545 if ud.lockfile:
1546 lf = bb.utils.lockfile(ud.lockfile)
1547
1548 ud.method.clean(ud, self.d)
1549 if ud.donestamp:
1550 bb.utils.remove(ud.donestamp)
1551
1552 if ud.lockfile:
1553 bb.utils.unlockfile(lf)
1554
1555from . import cvs
1556from . import git
1557from . import gitsm
1558from . import gitannex
1559from . import local
1560from . import svn
1561from . import wget
1562from . import ssh
1563from . import sftp
1564from . import perforce
1565from . import bzr
1566from . import hg
1567from . import osc
1568from . import repo
1569from . import clearcase
1570
1571methods.append(local.Local())
1572methods.append(wget.Wget())
1573methods.append(svn.Svn())
1574methods.append(git.Git())
1575methods.append(gitsm.GitSM())
1576methods.append(gitannex.GitANNEX())
1577methods.append(cvs.Cvs())
1578methods.append(ssh.SSH())
1579methods.append(sftp.SFTP())
1580methods.append(perforce.Perforce())
1581methods.append(bzr.Bzr())
1582methods.append(hg.Hg())
1583methods.append(osc.Osc())
1584methods.append(repo.Repo())
1585methods.append(clearcase.ClearCase())
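For reference, a minimal usage sketch (not part of the original file) showing how build code might drive the Fetch API defined above; the datastore d and the destination directory are assumed to be supplied by the caller:

    import bb.fetch2

    def fetch_and_unpack(d, workdir):
        # An empty url list makes Fetch() fall back to SRC_URI from the datastore.
        fetcher = bb.fetch2.Fetch([], d)
        # Try PREMIRRORS, then upstream, then MIRRORS, as implemented above.
        fetcher.download()
        # Unpack each fetched artefact into workdir via the per-method unpack().
        fetcher.unpack(workdir)
        return fetcher.localpaths()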
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..03e9ac461b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import runfetchcmd
34from bb.fetch2 import logger
35
36class Bzr(FetchMethod):
37 def supports(self, ud, d):
38 return ud.type in ['bzr']
39
40 def urldata_init(self, ud, d):
41 """
42 init bzr specific variable within url data
43 """
44 # Create paths to bzr checkouts
45 relpath = self._strip_leading_slashes(ud.path)
46 ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
47
48 ud.setup_revisons(d)
49
50 if not ud.revision:
51 ud.revision = self.latest_revision(ud, d, ud.names[0])
52
53 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
54
55 def _buildbzrcommand(self, ud, d, command):
56 """
57 Build up a bzr commandline based on ud
58 command is "fetch", "update", "revno"
59 """
60
61 basecmd = data.expand('${FETCHCMD_bzr}', d)
62
63 proto = ud.parm.get('protocol', 'http')
64
65 bzrroot = ud.host + ud.path
66
67 options = []
68
69 if command == "revno":
70 bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
71 else:
72 if ud.revision:
73 options.append("-r %s" % ud.revision)
74
75 if command == "fetch":
76 bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
77 elif command == "update":
78 bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
79 else:
80 raise FetchError("Invalid bzr command %s" % command, ud.url)
81
82 return bzrcmd
83
84 def download(self, ud, d):
85 """Fetch url"""
86
87 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
88 bzrcmd = self._buildbzrcommand(ud, d, "update")
89 logger.debug(1, "BZR Update %s", ud.url)
90 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
91 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
92 runfetchcmd(bzrcmd, d)
93 else:
94 bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
95 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
96 bb.fetch2.check_network_access(d, bzrcmd, ud.url)
97 logger.debug(1, "BZR Checkout %s", ud.url)
98 bb.utils.mkdirhier(ud.pkgdir)
99 os.chdir(ud.pkgdir)
100 logger.debug(1, "Running %s", bzrcmd)
101 runfetchcmd(bzrcmd, d)
102
103 os.chdir(ud.pkgdir)
104
105 scmdata = ud.parm.get("scmdata", "")
106 if scmdata == "keep":
107 tar_flags = ""
108 else:
109 tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
110
111 # tar them up to a defined filename
112 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
113
114 def supports_srcrev(self):
115 return True
116
117 def _revision_key(self, ud, d, name):
118 """
119 Return a unique key for the url
120 """
121 return "bzr:" + ud.pkgdir
122
123 def _latest_revision(self, ud, d, name):
124 """
125 Return the latest upstream revision number
126 """
127 logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
128
129 bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
130
131 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
132
133 return output.strip()
134
135 def sortable_revision(self, ud, d, name):
136 """
137 Return a sortable revision number which in our case is the revision number
138 """
139
140 return False, self._build_revision(ud, d)
141
142 def _build_revision(self, ud, d):
143 return ud.revision
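For illustration, a bzr entry handled by this fetcher could look as follows; the host and path are placeholders, protocol defaults to "http" when omitted, and an unset SRCREV is resolved through _latest_revision() above:

    SRC_URI = "bzr://bzr.example.org/trunk/myproject;protocol=http"
    SRCREV = "1234"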
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
new file mode 100644
index 0000000000..bfca2f7bcf
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -0,0 +1,263 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' clearcase implementation
5
6The clearcase fetcher is used to retrieve files from a ClearCase repository.
7
8Usage in the recipe:
9
10 SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
11 SRCREV = "EXAMPLE_CLEARCASE_TAG"
12 PV = "${@d.getVar("SRCREV").replace("/", "+")}"
13
14The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
15
16Supported SRC_URI options are:
17
18- vob
19 (required) The name of the clearcase VOB (with a leading "/")
20
21- module
22 The module in the selected VOB (with a leading "/")
23
24 The module and vob parameters are combined to create
25 the following load rule in the view config spec:
26 load <vob><module>
27
28- proto
29 http or https
30
31Related variables:
32
33 CCASE_CUSTOM_CONFIG_SPEC
34 Write a config spec to this variable in your recipe to use it instead
35 of the default config spec generated by this fetcher.
36 Please note that the SRCREV loses its functionality if you specify
37 this variable. SRCREV is still used to label the archive after a fetch,
38 but it doesn't define what's fetched.
39
40User credentials:
41 cleartool:
42 The login of cleartool is handled by the system. No special steps needed.
43
44 rcleartool:
45 In order to use rcleartool with authenticated users an `rcleartool login` is
46 necessary before using the fetcher.
47"""
48# Copyright (C) 2014 Siemens AG
49#
50# This program is free software; you can redistribute it and/or modify
51# it under the terms of the GNU General Public License version 2 as
52# published by the Free Software Foundation.
53#
54# This program is distributed in the hope that it will be useful,
55# but WITHOUT ANY WARRANTY; without even the implied warranty of
56# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
57# GNU General Public License for more details.
58#
59# You should have received a copy of the GNU General Public License along
60# with this program; if not, write to the Free Software Foundation, Inc.,
61# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
62#
63
64import os
65import sys
66import shutil
67import bb
68from bb import data
69from bb.fetch2 import FetchMethod
70from bb.fetch2 import FetchError
71from bb.fetch2 import runfetchcmd
72from bb.fetch2 import logger
73from distutils import spawn
74
75class ClearCase(FetchMethod):
76 """Class to fetch urls via 'clearcase'"""
77 def init(self, d):
78 pass
79
80 def supports(self, ud, d):
81 """
82 Check to see if a given url can be fetched with Clearcase.
83 """
84 return ud.type in ['ccrc']
85
86 def debug(self, msg):
87 logger.debug(1, "ClearCase: %s", msg)
88
89 def urldata_init(self, ud, d):
90 """
91 init ClearCase specific variable within url data
92 """
93 ud.proto = "https"
94 if 'protocol' in ud.parm:
95 ud.proto = ud.parm['protocol']
96 if not ud.proto in ('http', 'https'):
97 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
98
99 ud.vob = ''
100 if 'vob' in ud.parm:
101 ud.vob = ud.parm['vob']
102 else:
103 msg = ud.url+": vob must be defined so the fetcher knows what to get."
104 raise bb.fetch2.MissingParameterError('vob', msg)
105
106 if 'module' in ud.parm:
107 ud.module = ud.parm['module']
108 else:
109 ud.module = ""
110
111 ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
112
113 if data.getVar("SRCREV", d, True) == "INVALID":
114 raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
115
116 ud.label = d.getVar("SRCREV")
117 ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
118
119 ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
120
121 ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
122 ud.module.replace("/", "."),
123 ud.label.replace("/", "."))
124
125 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", True))
126 ud.csname = "%s-config-spec" % (ud.identifier)
127 ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
128 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
129 ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
130 ud.localfile = "%s.tar.gz" % (ud.identifier)
131
132 self.debug("host = %s" % ud.host)
133 self.debug("path = %s" % ud.path)
134 self.debug("server = %s" % ud.server)
135 self.debug("proto = %s" % ud.proto)
136 self.debug("type = %s" % ud.type)
137 self.debug("vob = %s" % ud.vob)
138 self.debug("module = %s" % ud.module)
139 self.debug("basecmd = %s" % ud.basecmd)
140 self.debug("label = %s" % ud.label)
141 self.debug("ccasedir = %s" % ud.ccasedir)
142 self.debug("viewdir = %s" % ud.viewdir)
143 self.debug("viewname = %s" % ud.viewname)
144 self.debug("configspecfile = %s" % ud.configspecfile)
145 self.debug("localfile = %s" % ud.localfile)
146
147 ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
148
149 def _build_ccase_command(self, ud, command):
150 """
151 Build up a commandline based on ud
152 command is: mkview, setcs, rmview
153 """
154 options = []
155
156 if "rcleartool" in ud.basecmd:
157 options.append("-server %s" % ud.server)
158
159 basecmd = "%s %s" % (ud.basecmd, command)
160
161 if command == 'mkview':
162 if not "rcleartool" in ud.basecmd:
163 # Cleartool needs a -snapshot view
164 options.append("-snapshot")
165 options.append("-tag %s" % ud.viewname)
166 options.append(ud.viewdir)
167
168 elif command == 'rmview':
169 options.append("-force")
170 options.append("%s" % ud.viewdir)
171
172 elif command == 'setcs':
173 options.append("-overwrite")
174 options.append(ud.configspecfile)
175
176 else:
177 raise FetchError("Invalid ccase command %s" % command)
178
179 ccasecmd = "%s %s" % (basecmd, " ".join(options))
180 self.debug("ccasecmd = %s" % ccasecmd)
181 return ccasecmd
182
183 def _write_configspec(self, ud, d):
184 """
185 Create config spec file (ud.configspecfile) for ccase view
186 """
187 config_spec = ""
188 custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
189 if custom_config_spec is not None:
190 for line in custom_config_spec.split("\\n"):
191 config_spec += line+"\n"
192 bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
193 else:
194 config_spec += "element * CHECKEDOUT\n"
195 config_spec += "element * %s\n" % ud.label
196 config_spec += "load %s%s\n" % (ud.vob, ud.module)
197
198 logger.info("Using config spec: \n%s" % config_spec)
199
200 with open(ud.configspecfile, 'w') as f:
201 f.write(config_spec)
202
203 def _remove_view(self, ud, d):
204 if os.path.exists(ud.viewdir):
205 os.chdir(ud.ccasedir)
206 cmd = self._build_ccase_command(ud, 'rmview');
207 logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
208 bb.fetch2.check_network_access(d, cmd, ud.url)
209 output = runfetchcmd(cmd, d)
210 logger.info("rmview output: %s", output)
211
212 def need_update(self, ud, d):
213 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
214 ud.identifier += "-%s" % d.getVar("DATETIME", True)
215 return True
216 if os.path.exists(ud.localpath):
217 return False
218 return True
219
220 def supports_srcrev(self):
221 return True
222
223 def sortable_revision(self, ud, d, name):
224 return False, ud.identifier
225
226 def download(self, ud, d):
227 """Fetch url"""
228
229 # Make a fresh view
230 bb.utils.mkdirhier(ud.ccasedir)
231 self._write_configspec(ud, d)
232 cmd = self._build_ccase_command(ud, 'mkview')
233 logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
234 bb.fetch2.check_network_access(d, cmd, ud.url)
235 try:
236 runfetchcmd(cmd, d)
237 except FetchError as e:
238 if "CRCLI2008E" in e.msg:
239 raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
240 else:
241 raise e
242
243 # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
244 os.chdir(ud.viewdir)
245 cmd = self._build_ccase_command(ud, 'setcs');
246 logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
247 bb.fetch2.check_network_access(d, cmd, ud.url)
248 output = runfetchcmd(cmd, d)
249 logger.info("%s", output)
250
251 # Copy the configspec to the viewdir so we have it in our source tarball later
252 shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
253
254 # Clean clearcase meta-data before tar
255
256 runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
257
258 # Clean up so we can create a new view next time
259 self.clean(ud, d);
260
261 def clean(self, ud, d):
262 self._remove_view(ud, d)
263 bb.utils.remove(ud.configspecfile)
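The CCASE_CUSTOM_CONFIG_SPEC value consumed by _write_configspec() above is split on a literal "\n" sequence, so a recipe could supply a multi-line spec roughly like this (the element rules, vob and module names are placeholders only):

    CCASE_CUSTOM_CONFIG_SPEC = "element * CHECKEDOUT\nelement * /main/LATEST\nload /example_vob/example_module"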
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..d27d96f68c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
33from bb.fetch2 import runfetchcmd
34
35class Cvs(FetchMethod):
36 """
37 Class to fetch a module or modules from cvs repositories
38 """
39 def supports(self, ud, d):
40 """
41 Check to see if a given url can be fetched with cvs.
42 """
43 return ud.type in ['cvs']
44
45 def urldata_init(self, ud, d):
46 if not "module" in ud.parm:
47 raise MissingParameterError("module", ud.url)
48 ud.module = ud.parm["module"]
49
50 ud.tag = ud.parm.get('tag', "")
51
52 # Override the default date in certain cases
53 if 'date' in ud.parm:
54 ud.date = ud.parm['date']
55 elif ud.tag:
56 ud.date = ""
57
58 norecurse = ''
59 if 'norecurse' in ud.parm:
60 norecurse = '_norecurse'
61
62 fullpath = ''
63 if 'fullpath' in ud.parm:
64 fullpath = '_fullpath'
65
66 ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
67
68 def need_update(self, ud, d):
69 if (ud.date == "now"):
70 return True
71 if not os.path.exists(ud.localpath):
72 return True
73 return False
74
75 def download(self, ud, d):
76
77 method = ud.parm.get('method', 'pserver')
78 localdir = ud.parm.get('localdir', ud.module)
79 cvs_port = ud.parm.get('port', '')
80
81 cvs_rsh = None
82 if method == "ext":
83 if "rsh" in ud.parm:
84 cvs_rsh = ud.parm["rsh"]
85
86 if method == "dir":
87 cvsroot = ud.path
88 else:
89 cvsroot = ":" + method
90 cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
91 if cvsproxyhost:
92 cvsroot += ";proxy=" + cvsproxyhost
93 cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
94 if cvsproxyport:
95 cvsroot += ";proxyport=" + cvsproxyport
96 cvsroot += ":" + ud.user
97 if ud.pswd:
98 cvsroot += ":" + ud.pswd
99 cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
100
101 options = []
102 if 'norecurse' in ud.parm:
103 options.append("-l")
104 if ud.date:
105 # treat YYYYMMDDHHMM specially for CVS
106 if len(ud.date) == 12:
107 options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
108 else:
109 options.append("-D \"%s UTC\"" % ud.date)
110 if ud.tag:
111 options.append("-r %s" % ud.tag)
112
113 cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
114 cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
115 cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
116
117 if cvs_rsh:
118 cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
119 cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
120
121 # create module directory
122 logger.debug(2, "Fetch: checking for module directory")
123 pkg = d.getVar('PN', True)
124 pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
125 moddir = os.path.join(pkgdir, localdir)
126 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
127 logger.info("Update " + ud.url)
128 bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
129 # update sources there
130 os.chdir(moddir)
131 cmd = cvsupdatecmd
132 else:
133 logger.info("Fetch " + ud.url)
134 # check out sources there
135 bb.utils.mkdirhier(pkgdir)
136 os.chdir(pkgdir)
137 logger.debug(1, "Running %s", cvscmd)
138 bb.fetch2.check_network_access(d, cvscmd, ud.url)
139 cmd = cvscmd
140
141 runfetchcmd(cmd, d, cleanup = [moddir])
142
143 if not os.access(moddir, os.R_OK):
144 raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)
145
146 scmdata = ud.parm.get("scmdata", "")
147 if scmdata == "keep":
148 tar_flags = ""
149 else:
150 tar_flags = "--exclude 'CVS'"
151
152 # tar them up to a defined filename
153 if 'fullpath' in ud.parm:
154 os.chdir(pkgdir)
155 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
156 else:
157 os.chdir(moddir)
158 os.chdir('..')
159 cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
160
161 runfetchcmd(cmd, d, cleanup = [ud.localpath])
162
163 def clean(self, ud, d):
164 """ Clean CVS Files and tarballs """
165
166 pkg = d.getVar('PN', True)
167 pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
168
169 bb.utils.remove(pkgdir, True)
170 bb.utils.remove(ud.localpath)
171
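An illustrative cvs entry for this fetcher; the host, module and tag are placeholders, and method defaults to "pserver" when not given:

    SRC_URI = "cvs://anonymous@cvs.example.org/cvsroot;module=mymodule;tag=RELEASE_1_0"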
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..5573f0a81e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,358 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports multiple branch fetching, with branches
15 separated by commas. When fetching multiple branches, the name option
16 must list the same number of names as there are branches; each name is
17 used to specify the SRCREV for its branch
18 e.g:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 so the current revision may disappear from the upstream repo. This option
33 tells the fetcher to preserve the local cache carefully for future use.
34 The default value is "0"; set rebaseable=1 for a rebaseable git repo.
35
36- nocheckout
37 Don't check out source code when unpacking. Set this option for recipes
38 that have their own routine to check out code.
39 The default is "0"; set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't check out the source code
43 when unpacking. Set this option for recipes that have their own routine to
44 check out code and their own tracking-branch requirements.
45 The default is "0"; set bareclone=1 if needed.
46
47- nobranch
48 Don't validate that the SHA is on the named branch. Set this option for
49 recipes that refer to a commit which is reachable from a tag rather than a branch.
50 The default is "0"; set nobranch=1 if needed.
51
52"""
53
54#Copyright (C) 2005 Richard Purdie
55#
56# This program is free software; you can redistribute it and/or modify
57# it under the terms of the GNU General Public License version 2 as
58# published by the Free Software Foundation.
59#
60# This program is distributed in the hope that it will be useful,
61# but WITHOUT ANY WARRANTY; without even the implied warranty of
62# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63# GNU General Public License for more details.
64#
65# You should have received a copy of the GNU General Public License along
66# with this program; if not, write to the Free Software Foundation, Inc.,
67# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
68
69import os
70import bb
71from bb import data
72from bb.fetch2 import FetchMethod
73from bb.fetch2 import runfetchcmd
74from bb.fetch2 import logger
75
76class Git(FetchMethod):
77 """Class to fetch a module or modules from git repositories"""
78 def init(self, d):
79 pass
80
81 def supports(self, ud, d):
82 """
83 Check to see if a given url can be fetched with git.
84 """
85 return ud.type in ['git']
86
87 def supports_checksum(self, urldata):
88 return False
89
90 def urldata_init(self, ud, d):
91 """
92 init git specific variable within url data
93 so that the git method like latest_revision() can work
94 """
95 if 'protocol' in ud.parm:
96 ud.proto = ud.parm['protocol']
97 elif not ud.host:
98 ud.proto = 'file'
99 else:
100 ud.proto = "git"
101
102 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
103 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
104
105 ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
106
107 ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
108
109 ud.nobranch = ud.parm.get("nobranch","0") == "1"
110
111 # bareclone implies nocheckout
112 ud.bareclone = ud.parm.get("bareclone","0") == "1"
113 if ud.bareclone:
114 ud.nocheckout = 1
115
116 ud.unresolvedrev = {}
117 branches = ud.parm.get("branch", "master").split(',')
118 if len(branches) != len(ud.names):
119 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
120 ud.branches = {}
121 for name in ud.names:
122 branch = branches[ud.names.index(name)]
123 ud.branches[name] = branch
124 ud.unresolvedrev[name] = branch
125
126 ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
127
128 ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
129
130 ud.setup_revisons(d)
131
132 for name in ud.names:
133 # Ensure anything that doesn't look like a SHA-1 revision (40 hex chars) is translated into one
134 if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
135 if ud.revisions[name]:
136 ud.unresolvedrev[name] = ud.revisions[name]
137 ud.revisions[name] = self.latest_revision(ud, d, name)
138
139 gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
140 # for rebaseable git repo, it is necessary to keep mirror tar ball
141 # per revision, so that even the revision disappears from the
142 # upstream repo in the future, the mirror will remain intact and still
143 # contains the revision
144 if ud.rebaseable:
145 for name in ud.names:
146 gitsrcname = gitsrcname + '_' + ud.revisions[name]
147 ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
148 ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
149 gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
150 ud.clonedir = os.path.join(gitdir, gitsrcname)
151
152 ud.localfile = ud.clonedir
153
154 def localpath(self, ud, d):
155 return ud.clonedir
156
157 def need_update(self, ud, d):
158 if not os.path.exists(ud.clonedir):
159 return True
160 os.chdir(ud.clonedir)
161 for name in ud.names:
162 if not self._contains_ref(ud, d, name):
163 return True
164 if ud.write_tarballs and not os.path.exists(ud.fullmirror):
165 return True
166 return False
167
168 def try_premirror(self, ud, d):
169 # If we don't do this, updating an existing checkout with only premirrors
170 # is not possible
171 if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
172 return True
173 if os.path.exists(ud.clonedir):
174 return False
175 return True
176
177 def download(self, ud, d):
178 """Fetch url"""
179
180 if ud.user:
181 username = ud.user + '@'
182 else:
183 username = ""
184
185 ud.repochanged = not os.path.exists(ud.fullmirror)
186
187 # If the checkout doesn't exist and the mirror tarball does, extract it
188 if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
189 bb.utils.mkdirhier(ud.clonedir)
190 os.chdir(ud.clonedir)
191 runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
192
193 repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
194
195 # If the repo still doesn't exist, fallback to cloning it
196 if not os.path.exists(ud.clonedir):
197 # We do this since git will use a "-l" option automatically for local urls where possible
198 if repourl.startswith("file://"):
199 repourl = repourl[7:]
200 clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
201 if ud.proto.lower() != 'file':
202 bb.fetch2.check_network_access(d, clone_cmd)
203 runfetchcmd(clone_cmd, d)
204
205 os.chdir(ud.clonedir)
206 # Update the checkout if needed
207 needupdate = False
208 for name in ud.names:
209 if not self._contains_ref(ud, d, name):
210 needupdate = True
211 if needupdate:
212 try:
213 runfetchcmd("%s remote rm origin" % ud.basecmd, d)
214 except bb.fetch2.FetchError:
215 logger.debug(1, "No Origin")
216
217 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
218 fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
219 if ud.proto.lower() != 'file':
220 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
221 runfetchcmd(fetch_cmd, d)
222 runfetchcmd("%s prune-packed" % ud.basecmd, d)
223 runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
224 ud.repochanged = True
225 os.chdir(ud.clonedir)
226 for name in ud.names:
227 if not self._contains_ref(ud, d, name):
228 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
229
230 def build_mirror_data(self, ud, d):
231 # Generate a mirror tarball if needed
232 if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
233 # it's possible that this symlink points to read-only filesystem with PREMIRROR
234 if os.path.islink(ud.fullmirror):
235 os.unlink(ud.fullmirror)
236
237 os.chdir(ud.clonedir)
238 logger.info("Creating tarball of git repository")
239 runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
240 runfetchcmd("touch %s.done" % (ud.fullmirror), d)
241
242 def unpack(self, ud, destdir, d):
243 """ unpack the downloaded src to destdir"""
244
245 subdir = ud.parm.get("subpath", "")
246 if subdir != "":
247 readpathspec = ":%s" % (subdir)
248 def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
249 else:
250 readpathspec = ""
251 def_destsuffix = "git/"
252
253 destsuffix = ud.parm.get("destsuffix", def_destsuffix)
254 destdir = ud.destdir = os.path.join(destdir, destsuffix)
255 if os.path.exists(destdir):
256 bb.utils.prunedir(destdir)
257
258 cloneflags = "-s -n"
259 if ud.bareclone:
260 cloneflags += " --mirror"
261
262 # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
263 # and you end up with some horrible union of the two when you attempt to clone it
264 # The least invasive workaround seems to be a symlink to the real directory to
265 # fool git into ignoring any .git version that may also be present.
266 #
267 # The issue is fixed in more recent versions of git so we can drop this hack in future
268 # when that version becomes common enough.
269 clonedir = ud.clonedir
270 if not ud.path.endswith(".git"):
271 indirectiondir = destdir[:-1] + ".indirectionsymlink"
272 if os.path.exists(indirectiondir):
273 os.remove(indirectiondir)
274 bb.utils.mkdirhier(os.path.dirname(indirectiondir))
275 os.symlink(ud.clonedir, indirectiondir)
276 clonedir = indirectiondir
277
278 runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
279 if not ud.nocheckout:
280 os.chdir(destdir)
281 if subdir != "":
282 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
283 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
284 else:
285 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
286 return True
287
288 def clean(self, ud, d):
289 """ clean the git directory """
290
291 bb.utils.remove(ud.localpath, True)
292 bb.utils.remove(ud.fullmirror)
293 bb.utils.remove(ud.fullmirror + ".done")
294
295 def supports_srcrev(self):
296 return True
297
298 def _contains_ref(self, ud, d, name):
299 cmd = ""
300 if ud.nobranch:
301 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
302 ud.basecmd, ud.revisions[name])
303 else:
304 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
305 ud.basecmd, ud.revisions[name], ud.branches[name])
306 try:
307 output = runfetchcmd(cmd, d, quiet=True)
308 except bb.fetch2.FetchError:
309 return False
310 if len(output.split()) > 1:
311 raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
312 return output.split()[0] != "0"
313
314 def _revision_key(self, ud, d, name):
315 """
316 Return a unique key for the url
317 """
318 return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
319
320 def _lsremote(self, ud, d, search):
321 """
322 Run git ls-remote with the specified search string
323 """
324 if ud.user:
325 username = ud.user + '@'
326 else:
327 username = ""
328
329 cmd = "%s ls-remote %s://%s%s%s %s" % \
330 (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
331 if ud.proto.lower() != 'file':
332 bb.fetch2.check_network_access(d, cmd)
333 output = runfetchcmd(cmd, d, True)
334 if not output:
335 raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
336 return output
337
338 def _latest_revision(self, ud, d, name):
339 """
340 Compute the HEAD revision for the url
341 """
342 if ud.unresolvedrev[name][:5] == "refs/":
343 search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
344 else:
345 search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
346 output = self._lsremote(ud, d, search)
347 return output.split()[0]
348
349 def _build_revision(self, ud, d, name):
350 return ud.revisions[name]
351
352 def checkstatus(self, ud, d):
353 fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
354 try:
355 runfetchcmd(fetchcmd, d, quiet=True)
356 return True
357 except bb.fetch2.FetchError:
358 return False
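Combining the options documented at the top of this file, a typical recipe entry might look like this (host, path and revision are placeholders); anything that is not a 40-character hex revision is resolved through _latest_revision() in urldata_init() above:

    SRC_URI = "git://git.example.org/myproject.git;protocol=https;branch=master"
    SRCREV = "0123456789abcdef0123456789abcdef01234567"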
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 0000000000..0f37897450
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,76 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git annex implementation
5"""
6
7# Copyright (C) 2014 Otavio Salvador
8# Copyright (C) 2014 O.S. Systems Software LTDA.
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch2.git import Git
27from bb.fetch2 import runfetchcmd
28from bb.fetch2 import logger
29
30class GitANNEX(Git):
31 def supports(self, ud, d):
32 """
33 Check to see if a given url can be fetched with git.
34 """
35 return ud.type in ['gitannex']
36
37 def uses_annex(self, ud, d):
38 for name in ud.names:
39 try:
40 runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
41 return True
42 except bb.fetch.FetchError:
43 pass
44
45 return False
46
47 def update_annex(self, ud, d):
48 try:
49 runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
50 except bb.fetch.FetchError:
51 return False
52 runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)
53
54 return True
55
56 def download(self, ud, d):
57 Git.download(self, ud, d)
58
59 os.chdir(ud.clonedir)
60 annex = self.uses_annex(ud, d)
61 if annex:
62 self.update_annex(ud, d)
63
64 def unpack(self, ud, destdir, d):
65 Git.unpack(self, ud, destdir, d)
66
67 os.chdir(ud.destdir)
68 try:
69 runfetchcmd("%s annex sync" % (ud.basecmd), d)
70 except bb.fetch.FetchError:
71 pass
72
73 annex = self.uses_annex(ud, d)
74 if annex:
75 runfetchcmd("%s annex get" % (ud.basecmd), d)
76 runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000000..c125cff54b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,136 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git submodules implementation
5
6Inherits from and extends the Git fetcher to retrieve submodules of a git repository
7after cloning.
8
9SRC_URI = "gitsm://<see Git fetcher for syntax>"
10
11See the Git fetcher, git://, for usage documentation.
12
13NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
14
15"""
16
17# Copyright (C) 2013 Richard Purdie
18#
19# This program is free software; you can redistribute it and/or modify
20# it under the terms of the GNU General Public License version 2 as
21# published by the Free Software Foundation.
22#
23# This program is distributed in the hope that it will be useful,
24# but WITHOUT ANY WARRANTY; without even the implied warranty of
25# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26# GNU General Public License for more details.
27#
28# You should have received a copy of the GNU General Public License along
29# with this program; if not, write to the Free Software Foundation, Inc.,
30# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
31
32import os
33import bb
34from bb import data
35from bb.fetch2.git import Git
36from bb.fetch2 import runfetchcmd
37from bb.fetch2 import logger
38
39class GitSM(Git):
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with git.
43 """
44 return ud.type in ['gitsm']
45
46 def uses_submodules(self, ud, d):
47 for name in ud.names:
48 try:
49 runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
50 return True
51 except bb.fetch.FetchError:
52 pass
53 return False
54
55 def _set_relative_paths(self, repopath):
56 """
57 Fix submodule paths to be relative instead of absolute,
58 so that when we move the repo it doesn't break
59 (In Git 1.7.10+ this is done automatically)
60 """
61 submodules = []
62 with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
63 for line in f.readlines():
64 if line.startswith('[submodule'):
65 submodules.append(line.split('"')[1])
66
67 for module in submodules:
68 repo_conf = os.path.join(repopath, module, '.git')
69 if os.path.exists(repo_conf):
70 with open(repo_conf, 'r') as f:
71 lines = f.readlines()
72 newpath = ''
73 for i, line in enumerate(lines):
74 if line.startswith('gitdir:'):
75 oldpath = line.split(': ')[-1].rstrip()
76 if oldpath.startswith('/'):
77 newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
78 lines[i] = 'gitdir: %s\n' % newpath
79 break
80 if newpath:
81 with open(repo_conf, 'w') as f:
82 for line in lines:
83 f.write(line)
84
85 repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
86 if os.path.exists(repo_conf2):
87 with open(repo_conf2, 'r') as f:
88 lines = f.readlines()
89 newpath = ''
90 for i, line in enumerate(lines):
91 if line.lstrip().startswith('worktree = '):
92 oldpath = line.split(' = ')[-1].rstrip()
93 if oldpath.startswith('/'):
94 newpath = '../' * (module.count('/') + 3) + module
95 lines[i] = '\tworktree = %s\n' % newpath
96 break
97 if newpath:
98 with open(repo_conf2, 'w') as f:
99 for line in lines:
100 f.write(line)
101
102 def update_submodules(self, ud, d):
103 # We have to convert bare -> full repo, do the submodule bit, then convert back
104 tmpclonedir = ud.clonedir + ".tmp"
105 gitdir = tmpclonedir + os.sep + ".git"
106 bb.utils.remove(tmpclonedir, True)
107 os.mkdir(tmpclonedir)
108 os.rename(ud.clonedir, gitdir)
109 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
110 os.chdir(tmpclonedir)
111 runfetchcmd(ud.basecmd + " reset --hard", d)
112 runfetchcmd(ud.basecmd + " submodule init", d)
113 runfetchcmd(ud.basecmd + " submodule update", d)
114 self._set_relative_paths(tmpclonedir)
115 runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
116 os.rename(gitdir, ud.clonedir,)
117 bb.utils.remove(tmpclonedir, True)
118
119 def download(self, ud, d):
120 Git.download(self, ud, d)
121
122 os.chdir(ud.clonedir)
123 submodules = self.uses_submodules(ud, d)
124 if submodules:
125 self.update_submodules(ud, d)
126
127 def unpack(self, ud, destdir, d):
128 Git.unpack(self, ud, destdir, d)
129
130 os.chdir(ud.destdir)
131 submodules = self.uses_submodules(ud, d)
132 if submodules:
133 runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
134 runfetchcmd(ud.basecmd + " submodule init", d)
135 runfetchcmd(ud.basecmd + " submodule update", d)
136
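The gitsm URL syntax mirrors the git fetcher; an illustrative entry (placeholders as before):

    SRC_URI = "gitsm://git.example.org/myproject.git;protocol=https;branch=master"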
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..81592f6e04
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,193 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
38class Hg(FetchMethod):
39 """Class to fetch from mercurial repositories"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with mercurial.
43 """
44 return ud.type in ['hg']
45
46 def urldata_init(self, ud, d):
47 """
48 init hg specific variable within url data
49 """
50 if not "module" in ud.parm:
51 raise MissingParameterError('module', ud.url)
52
53 ud.module = ud.parm["module"]
54
55 # Create paths to mercurial checkouts
56 relpath = self._strip_leading_slashes(ud.path)
57 ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
58 ud.moddir = os.path.join(ud.pkgdir, ud.module)
59
60 ud.setup_revisons(d)
61
62 if 'rev' in ud.parm:
63 ud.revision = ud.parm['rev']
64 elif not ud.revision:
65 ud.revision = self.latest_revision(ud, d, ud.names[0])
66
67 ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
68
69 def need_update(self, ud, d):
70 revTag = ud.parm.get('rev', 'tip')
71 if revTag == "tip":
72 return True
73 if not os.path.exists(ud.localpath):
74 return True
75 return False
76
77 def _buildhgcommand(self, ud, d, command):
78 """
79 Build up an hg commandline based on ud
80 command is "fetch", "update", "info"
81 """
82
83 basecmd = data.expand('${FETCHCMD_hg}', d)
84
85 proto = ud.parm.get('protocol', 'http')
86
87 host = ud.host
88 if proto == "file":
89 host = "/"
90 ud.host = "localhost"
91
92 if not ud.user:
93 hgroot = host + ud.path
94 else:
95 if ud.pswd:
96 hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
97 else:
98 hgroot = ud.user + "@" + host + ud.path
99
100 if command == "info":
101 return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
102
103        options = []
104
105 # Don't specify revision for the fetch; clone the entire repo.
106 # This avoids an issue if the specified revision is a tag, because
107        # the tag is actually recorded in the next changeset (revision + 1), so it won't
108 # be available when used in any successive commands.
109 if ud.revision and command != "fetch":
110 options.append("-r %s" % ud.revision)
111
112 if command == "fetch":
113 if ud.user and ud.pswd:
114 cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
115 else:
116 cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
117 elif command == "pull":
118 # do not pass options list; limiting pull to rev causes the local
119 # repo not to contain it and immediately following "update" command
120 # will crash
121 if ud.user and ud.pswd:
122 cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
123 else:
124 cmd = "%s pull" % (basecmd)
125 elif command == "update":
126 if ud.user and ud.pswd:
127 cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
128 else:
129 cmd = "%s update -C %s" % (basecmd, " ".join(options))
130 else:
131 raise FetchError("Invalid hg command %s" % command, ud.url)
132
133 return cmd
134
135 def download(self, ud, d):
136 """Fetch url"""
137
138 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
139
140 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
141 updatecmd = self._buildhgcommand(ud, d, "pull")
142 logger.info("Update " + ud.url)
143 # update sources there
144 os.chdir(ud.moddir)
145 logger.debug(1, "Running %s", updatecmd)
146 bb.fetch2.check_network_access(d, updatecmd, ud.url)
147 runfetchcmd(updatecmd, d)
148
149 else:
150 fetchcmd = self._buildhgcommand(ud, d, "fetch")
151 logger.info("Fetch " + ud.url)
152 # check out sources there
153 bb.utils.mkdirhier(ud.pkgdir)
154 os.chdir(ud.pkgdir)
155 logger.debug(1, "Running %s", fetchcmd)
156 bb.fetch2.check_network_access(d, fetchcmd, ud.url)
157 runfetchcmd(fetchcmd, d)
158
159 # Even when we clone (fetch), we still need to update as hg's clone
160 # won't checkout the specified revision if its on a branch
161 updatecmd = self._buildhgcommand(ud, d, "update")
162 os.chdir(ud.moddir)
163 logger.debug(1, "Running %s", updatecmd)
164 runfetchcmd(updatecmd, d)
165
166 scmdata = ud.parm.get("scmdata", "")
167 if scmdata == "keep":
168 tar_flags = ""
169 else:
170            tar_flags = "--exclude '.hg' --exclude '.hgtags'"
171
172 os.chdir(ud.pkgdir)
173 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
174
175 def supports_srcrev(self):
176 return True
177
178 def _latest_revision(self, ud, d, name):
179 """
180 Compute tip revision for the url
181 """
182 bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
183 output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
184 return output.strip()
185
186 def _build_revision(self, ud, d, name):
187 return ud.revision
188
189 def _revision_key(self, ud, d, name):
190 """
191 Return a unique key for the url
192 """
193 return "hg:" + ud.moddir
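
For illustration only, a minimal standalone sketch of how Hg._buildhgcommand() above composes the "hgroot" and the clone command line; the host, path and module values are invented, and a literal "hg" stands in for the expanded ${FETCHCMD_hg}.

def build_clone_cmd(host, path, module, user=None, pswd=None, proto="http"):
    # hgroot composition, mirroring Hg._buildhgcommand()
    if not user:
        hgroot = host + path
    elif pswd:
        hgroot = user + ":" + pswd + "@" + host + path
    else:
        hgroot = user + "@" + host + path
    # The clone deliberately omits "-r <rev>": the whole repository is fetched and
    # the requested revision is selected later by "hg update -C -r <rev>".
    return "hg clone %s://%s/%s %s" % (proto, hgroot, module, module)

print(build_clone_cmd("hg.example.com", "/repos", "myproject"))
# -> hg clone http://hg.example.com/repos/myproject myproject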
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..0785236a6b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,128 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import urllib
30import bb
31import bb.utils
32from bb import data
33from bb.fetch2 import FetchMethod, FetchError
34from bb.fetch2 import logger
35
36class Local(FetchMethod):
37 def supports(self, urldata, d):
38 """
39 Check to see if a given url represents a local fetch.
40 """
41 return urldata.type in ['file']
42
43 def urldata_init(self, ud, d):
44 # We don't set localfile as for this fetcher the file is already local!
45 ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
46 ud.basename = os.path.basename(ud.decodedurl)
47 ud.basepath = ud.decodedurl
48 return
49
50 def localpath(self, urldata, d):
51 """
52 Return the local filename of a given url assuming a successful fetch.
53 """
54 return self.localpaths(urldata, d)[-1]
55
56 def localpaths(self, urldata, d):
57 """
58 Return the local filename of a given url assuming a successful fetch.
59 """
60 searched = []
61 path = urldata.decodedurl
62 newpath = path
63 if path[0] == "/":
64 return [path]
65 filespath = data.getVar('FILESPATH', d, True)
66 if filespath:
67 logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
68 newpath, hist = bb.utils.which(filespath, path, history=True)
69 searched.extend(hist)
70 if not newpath:
71 filesdir = data.getVar('FILESDIR', d, True)
72 if filesdir:
73 logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
74 newpath = os.path.join(filesdir, path)
75 searched.append(newpath)
76 if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
77 # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
78 newpath, hist = bb.utils.which(filespath, ".", history=True)
79 searched.extend(hist)
80 logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
81 return searched
82 if not os.path.exists(newpath):
83 dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
84 logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
85 bb.utils.mkdirhier(os.path.dirname(dldirfile))
86 searched.append(dldirfile)
87 return searched
88 return searched
89
90 def need_update(self, ud, d):
91 if ud.url.find("*") != -1:
92 return False
93 if os.path.exists(ud.localpath):
94 return False
95 return True
96
97 def download(self, urldata, d):
98 """Fetch urls (no-op for Local method)"""
99 # no need to fetch local files, we'll deal with them in place.
100 if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
101 locations = []
102 filespath = data.getVar('FILESPATH', d, True)
103 if filespath:
104 locations = filespath.split(":")
105 filesdir = data.getVar('FILESDIR', d, True)
106 if filesdir:
107 locations.append(filesdir)
108 locations.append(d.getVar("DL_DIR", True))
109
110 msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
111 raise FetchError(msg)
112
113 return True
114
115 def checkstatus(self, urldata, d):
116 """
117 Check the status of the url
118 """
119 if urldata.localpath.find("*") != -1:
120 logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
121 return True
122 if os.path.exists(urldata.localpath):
123 return True
124 return False
125
126 def clean(self, urldata, d):
127 return
128
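For illustration, a standalone sketch of the search order implemented by Local.localpaths() above; the FILESPATH, FILESDIR and DL_DIR values are invented, and unlike the real method (which stops at the first existing file via bb.utils.which() and handles '*' globs) this only lists the candidate locations in order.

import os

def search_order(path, filespath, filesdir, dl_dir):
    # Absolute file:// paths are used as-is
    if os.path.isabs(path):
        return [path]
    # Otherwise try every FILESPATH entry, then the legacy FILESDIR,
    # and finally DL_DIR as the last resort
    candidates = [os.path.join(p, path) for p in filespath.split(":")]
    candidates.append(os.path.join(filesdir, path))
    candidates.append(os.path.join(dl_dir, path))
    return candidates

print(search_order("defconfig",
                   "/layer/recipe/files:/layer/files",
                   "/layer/recipe",
                   "/downloads"))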
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..3d8779682f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import logging
12import bb
13from bb import data
14from bb.fetch2 import FetchMethod
15from bb.fetch2 import FetchError
16from bb.fetch2 import MissingParameterError
17from bb.fetch2 import runfetchcmd, logger
18
19class Osc(FetchMethod):
20 """Class to fetch a module or modules from Opensuse build server
21 repositories."""
22
23 def supports(self, ud, d):
24 """
25 Check to see if a given url can be fetched with osc.
26 """
27 return ud.type in ['osc']
28
29 def urldata_init(self, ud, d):
30 if not "module" in ud.parm:
31 raise MissingParameterError('module', ud.url)
32
33 ud.module = ud.parm["module"]
34
35 # Create paths to osc checkouts
36 relpath = self._strip_leading_slashes(ud.path)
37 ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
38 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
39
40 if 'rev' in ud.parm:
41 ud.revision = ud.parm['rev']
42 else:
43 pv = data.getVar("PV", d, 0)
44 rev = bb.fetch2.srcrev_internal_helper(ud, d)
45 if rev and rev != True:
46 ud.revision = rev
47 else:
48 ud.revision = ""
49
50 ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
51
52 def _buildosccommand(self, ud, d, command):
53 """
54        Build up an osc commandline based on ud
55 command is "fetch", "update", "info"
56 """
57
58 basecmd = data.expand('${FETCHCMD_osc}', d)
59
60 proto = ud.parm.get('protocol', 'ocs')
61
62 options = []
63
64 config = "-c %s" % self.generate_config(ud, d)
65
66 if ud.revision:
67 options.append("-r %s" % ud.revision)
68
69 coroot = self._strip_leading_slashes(ud.path)
70
71 if command == "fetch":
72 osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
73 elif command == "update":
74 osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
75 else:
76 raise FetchError("Invalid osc command %s" % command, ud.url)
77
78 return osccmd
79
80 def download(self, ud, d):
81 """
82 Fetch url
83 """
84
85 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
86
87 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
88 oscupdatecmd = self._buildosccommand(ud, d, "update")
89 logger.info("Update "+ ud.url)
90 # update sources there
91 os.chdir(ud.moddir)
92 logger.debug(1, "Running %s", oscupdatecmd)
93 bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
94 runfetchcmd(oscupdatecmd, d)
95 else:
96 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
97 logger.info("Fetch " + ud.url)
98 # check out sources there
99 bb.utils.mkdirhier(ud.pkgdir)
100 os.chdir(ud.pkgdir)
101 logger.debug(1, "Running %s", oscfetchcmd)
102 bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
103 runfetchcmd(oscfetchcmd, d)
104
105 os.chdir(os.path.join(ud.pkgdir + ud.path))
106 # tar them up to a defined filename
107 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
108
109 def supports_srcrev(self):
110 return False
111
112 def generate_config(self, ud, d):
113 """
114 Generate a .oscrc to be used for this run.
115 """
116
117 config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
118 if (os.path.exists(config_path)):
119 os.remove(config_path)
120
121 f = open(config_path, 'w')
122 f.write("[general]\n")
123 f.write("apisrv = %s\n" % ud.host)
124 f.write("scheme = http\n")
125 f.write("su-wrapper = su -c\n")
126 f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
127 f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
128 f.write("extra-pkgs = gzip\n")
129 f.write("\n")
130 f.write("[%s]\n" % ud.host)
131 f.write("user = %s\n" % ud.parm["user"])
132 f.write("pass = %s\n" % ud.parm["pswd"])
133 f.close()
134
135 return config_path
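
A rough standalone sketch of the "fetch" branch of Osc._buildosccommand() above; the project path, package name, revision and oscrc location are invented, and the config file produced by generate_config() is represented only by its path.

def osc_checkout_cmd(path, module, rev=None, config="/path/to/oscrc"):
    coroot = path.lstrip("/")                     # _strip_leading_slashes() equivalent
    options = ["-r %s" % rev] if rev else []
    return "osc -c %s co %s/%s %s" % (config, coroot, module, " ".join(options))

print(osc_checkout_cmd("/devel:tools", "mypackage", rev="12"))
# -> osc -c /path/to/oscrc co devel:tools/mypackage -r 12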
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..d079a33c62
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,187 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import subprocess
31import logging
32import bb
33from bb import data
34from bb.fetch2 import FetchMethod
35from bb.fetch2 import FetchError
36from bb.fetch2 import logger
37from bb.fetch2 import runfetchcmd
38
39class Perforce(FetchMethod):
40 def supports(self, ud, d):
41 return ud.type in ['p4']
42
43 def doparse(url, d):
44 parm = {}
45 path = url.split("://")[1]
46        delim = path.find("@")
47 if delim != -1:
48 (user, pswd, host, port) = path.split('@')[0].split(":")
49 path = path.split('@')[1]
50 else:
51 (host, port) = d.getVar('P4PORT').split(':')
52 user = ""
53 pswd = ""
54
55 if path.find(";") != -1:
56 keys=[]
57 values=[]
58 plist = path.split(';')
59 for item in plist:
60 if item.count('='):
61 (key, value) = item.split('=')
62 keys.append(key)
63 values.append(value)
64
65 parm = dict(zip(keys, values))
66 path = "//" + path.split(';')[0]
67 host += ":%s" % (port)
68 parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
69
70 return host, path, user, pswd, parm
71 doparse = staticmethod(doparse)
72
73 def getcset(d, depot, host, user, pswd, parm):
74 p4opt = ""
75 if "cset" in parm:
76            return parm["cset"]
77 if user:
78 p4opt += " -u %s" % (user)
79 if pswd:
80 p4opt += " -P %s" % (pswd)
81 if host:
82 p4opt += " -p %s" % (host)
83
84 p4date = d.getVar("P4DATE", True)
85 if "revision" in parm:
86 depot += "#%s" % (parm["revision"])
87 elif "label" in parm:
88 depot += "@%s" % (parm["label"])
89 elif p4date:
90 depot += "@%s" % (p4date)
91
92 p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
93 logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
94 p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
95 cset = p4file.strip()
96 logger.debug(1, "READ %s", cset)
97 if not cset:
98 return -1
99
100 return cset.split(' ')[1]
101 getcset = staticmethod(getcset)
102
103 def urldata_init(self, ud, d):
104 (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
105
106 base_path = path.replace('/...', '')
107 base_path = self._strip_leading_slashes(base_path)
108
109 if "label" in parm:
110 version = parm["label"]
111 else:
112 version = Perforce.getcset(d, path, host, user, pswd, parm)
113
114 ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
115
116 def download(self, ud, d):
117 """
118 Fetch urls
119 """
120
121 (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
122
123 if depot.find('/...') != -1:
124 path = depot[:depot.find('/...')]
125 else:
126 path = depot
127
128 module = parm.get('module', os.path.basename(path))
129
130 # Get the p4 command
131 p4opt = ""
132 if user:
133 p4opt += " -u %s" % (user)
134
135 if pswd:
136 p4opt += " -P %s" % (pswd)
137
138 if host:
139 p4opt += " -p %s" % (host)
140
141 p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
142
143 # create temp directory
144 logger.debug(2, "Fetch: creating temporary directory")
145 bb.utils.mkdirhier(d.expand('${WORKDIR}'))
146 mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
147 tmpfile, errors = bb.process.run(mktemp)
148 tmpfile = tmpfile.strip()
149 if not tmpfile:
150 raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
151
152 if "label" in parm:
153 depot = "%s@%s" % (depot, parm["label"])
154 else:
155 cset = Perforce.getcset(d, depot, host, user, pswd, parm)
156 depot = "%s@%s" % (depot, cset)
157
158 os.chdir(tmpfile)
159 logger.info("Fetch " + ud.url)
160 logger.info("%s%s files %s", p4cmd, p4opt, depot)
161 p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
162 p4file = [f.rstrip() for f in p4file.splitlines()]
163
164 if not p4file:
165 raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
166
167 count = 0
168
169 for file in p4file:
170 list = file.split()
171
172 if list[2] == "delete":
173 continue
174
175 dest = list[0][len(path)+1:]
176 where = dest.find("#")
177
178 subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
179 count = count + 1
180
181 if count == 0:
182            logger.error("Fetch: No files gathered from the P4 fetch")
183 raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
184
185 runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
186 # cleanup
187 bb.utils.prunedir(tmpfile)
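
For illustration, a standalone sketch of the URL splitting performed by Perforce.doparse() above, leaving out the P4PORT fallback and the automatic changeset lookup; the example URL and the stand-in host/port are invented.

def parse_p4_url(url):
    path = url.split("://")[1]
    user = pswd = ""
    if "@" in path:
        # user:pswd:host:port come before the '@', the depot path after it
        user, pswd, host, port = path.split("@")[0].split(":")
        path = path.split("@")[1]
    else:
        host, port = "perforce.example.com", "1666"   # stands in for d.getVar('P4PORT')
    parm = {}
    if ";" in path:
        # ;key=value pairs after the depot path become the parameter dictionary
        parm = dict(item.split("=") for item in path.split(";")[1:] if "=" in item)
        path = path.split(";")[0]
    return host + ":" + port, "//" + path, user, pswd, parm

print(parse_p4_url("p4://user:secret:p4server:1666@depot/project/...;label=release-1.0"))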
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..21678eb7d9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch2 import FetchMethod
30from bb.fetch2 import runfetchcmd, logger
31
32class Repo(FetchMethod):
33 """Class to fetch a module or modules from repo (git) repositories"""
34 def supports(self, ud, d):
35 """
36 Check to see if a given url can be fetched with repo.
37 """
38 return ud.type in ["repo"]
39
40 def urldata_init(self, ud, d):
41 """
42        We don't care about the git rev of the manifests repository, but
43 we do care about the manifest to use. The default is "default".
44 We also care about the branch or tag to be used. The default is
45 "master".
46 """
47
48 ud.proto = ud.parm.get('protocol', 'git')
49 ud.branch = ud.parm.get('branch', 'master')
50 ud.manifest = ud.parm.get('manifest', 'default.xml')
51 if not ud.manifest.endswith('.xml'):
52 ud.manifest += '.xml'
53
54 ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
55
56 def download(self, ud, d):
57 """Fetch url"""
58
59 if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
60 logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
61 return
62
63 gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
64 repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
65 codir = os.path.join(repodir, gitsrcname, ud.manifest)
66
67 if ud.user:
68 username = ud.user + "@"
69 else:
70 username = ""
71
72 bb.utils.mkdirhier(os.path.join(codir, "repo"))
73 os.chdir(os.path.join(codir, "repo"))
74 if not os.path.exists(os.path.join(codir, "repo", ".repo")):
75 bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
76 runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
77
78 bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
79 runfetchcmd("repo sync", d)
80 os.chdir(codir)
81
82 scmdata = ud.parm.get("scmdata", "")
83 if scmdata == "keep":
84 tar_flags = ""
85 else:
86 tar_flags = "--exclude '.repo' --exclude '.git'"
87
88 # Create a cache
89 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
90
91 def supports_srcrev(self):
92 return False
93
94 def _build_revision(self, ud, d):
95 return ud.manifest
96
97 def _want_sortable_revision(self, ud, d):
98 return False
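
For illustration, a standalone sketch of the "repo init" command assembled in Repo.download() above; the manifest server host and path are invented.

def repo_init_cmd(host, path, manifest="default.xml", branch="master", proto="git", user=""):
    username = user + "@" if user else ""
    if not manifest.endswith(".xml"):
        manifest += ".xml"            # urldata_init() appends the suffix if missing
    return "repo init -m %s -b %s -u %s://%s%s%s" % (manifest, branch, proto, username, host, path)

print(repo_init_cmd("android.example.com", "/platform/manifest"))
# -> repo init -m default.xml -b master -u git://android.example.com/platform/manifest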
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..8ea4ef2ff3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake SFTP Fetch implementation
5
6Class for fetching files via SFTP. It tries to adhere to the (now
7expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
8Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
9(SSH)" (SECSH URI).
10
11It uses SFTP (so as to adhere to the SECSH URI specification). It only
12supports key based authentication, not password. This class, unlike
13the SSH fetcher, does not support fetching a directory tree from the
14remote.
15
16 http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
17 https://www.iana.org/assignments/uri-schemes/prov/sftp
18 https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
19
20Please note that '/' is used as the host path separator, and not ":"
21as you may be used to from the scp/sftp commands. You can use a
22~ (tilde) to specify a path relative to your home directory.
23(The /~user/ syntax, for specifying a path relative to another
24user's home directory, is not supported.) Note that the tilde must
25still follow the host path separator ("/"). See examples below.
26
27Example SRC_URIs:
28
29SRC_URI = "sftp://host.example.com/dir/path.file.txt"
30
31A path relative to your home directory.
32
33SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
34
35You can also specify a username (specifying a password in the
36URI is not supported, use SSH keys to authenticate):
37
38SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
39
40"""
41
42# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
43#
44# Based in part on bb.fetch2.wget:
45# Copyright (C) 2003, 2004 Chris Larson
46#
47# This program is free software; you can redistribute it and/or modify
48# it under the terms of the GNU General Public License version 2 as
49# published by the Free Software Foundation.
50#
51# This program is distributed in the hope that it will be useful,
52# but WITHOUT ANY WARRANTY; without even the implied warranty of
53# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
54# GNU General Public License for more details.
55#
56# You should have received a copy of the GNU General Public License along
57# with this program; if not, write to the Free Software Foundation, Inc.,
58# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
59#
60# Based on functions from the base bb module, Copyright 2003 Holger Schurig
61
62import os
63import bb
64import urllib
65import commands
66from bb import data
67from bb.fetch2 import URI
68from bb.fetch2 import FetchMethod
69from bb.fetch2 import runfetchcmd
70
71
72class SFTP(FetchMethod):
73 """Class to fetch urls via 'sftp'"""
74
75 def supports(self, ud, d):
76 """
77 Check to see if a given url can be fetched with sftp.
78 """
79 return ud.type in ['sftp']
80
81 def recommends_checksum(self, urldata):
82 return True
83
84 def urldata_init(self, ud, d):
85 if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
86 raise bb.fetch2.ParameterError(
87 "Invalid protocol - if you wish to fetch from a " +
88 "git repository using ssh, you need to use the " +
89 "git:// prefix with protocol=ssh", ud.url)
90
91 if 'downloadfilename' in ud.parm:
92 ud.basename = ud.parm['downloadfilename']
93 else:
94 ud.basename = os.path.basename(ud.path)
95
96 ud.localfile = data.expand(urllib.unquote(ud.basename), d)
97
98 def download(self, ud, d):
99 """Fetch urls"""
100
101 urlo = URI(ud.url)
102 basecmd = 'sftp -oPasswordAuthentication=no'
103 port = ''
104 if urlo.port:
105 port = '-P %d' % urlo.port
106 urlo.port = None
107
108 dldir = data.getVar('DL_DIR', d, True)
109 lpath = os.path.join(dldir, ud.localfile)
110
111 user = ''
112 if urlo.userinfo:
113 user = urlo.userinfo + '@'
114
115 path = urlo.path
116
117        # Support URIs relative to the user's home directory, with
118 # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
119 if path[:3] == '/~/':
120 path = path[3:]
121
122 remote = '%s%s:%s' % (user, urlo.hostname, path)
123
124 cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
125 commands.mkarg(lpath))
126
127 bb.fetch2.check_network_access(d, cmd, ud.url)
128 runfetchcmd(cmd, d)
129 return True
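For illustration, a small standalone sketch of the remote-path handling described in the docstring and implemented in SFTP.download() above (tilde paths relative to the remote home directory); user, host and path are invented, and the actual sftp invocation, port and quoting are left out.

def sftp_remote(user, host, path):
    # A '~' must follow the '/' separator; '/~/'-prefixed paths are taken
    # relative to the remote user's home directory
    if path[:3] == '/~/':
        path = path[3:]
    prefix = user + '@' if user else ''
    return '%s%s:%s' % (prefix, host, path)

print(sftp_remote('builder', 'host.example.com', '/~/dir/path.file.txt'))
# -> builder@host.example.com:dir/path.file.txt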
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..4ae979472c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13    Please note that '/' is used as the host/path separator, not ':' as you may
14    be used to; '~' can also be used to specify the user HOME, but again only after '/'
15
16 Example SRC_URI:
17 SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
18 SRC_URI = "ssh://user@host.example.com/~/file.txt"
19'''
20
21# Copyright (C) 2006 OpenedHand Ltd.
22#
23#
24# Based in part on svk.py:
25# Copyright (C) 2006 Holger Hans Peter Freyther
26# Based on svn.py:
27# Copyright (C) 2003, 2004 Chris Larson
28# Based on functions from the base bb module:
29# Copyright 2003 Holger Schurig
30#
31#
32# This program is free software; you can redistribute it and/or modify
33# it under the terms of the GNU General Public License version 2 as
34# published by the Free Software Foundation.
35#
36# This program is distributed in the hope that it will be useful,
37# but WITHOUT ANY WARRANTY; without even the implied warranty of
38# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
39# GNU General Public License for more details.
40#
41# You should have received a copy of the GNU General Public License along
42# with this program; if not, write to the Free Software Foundation, Inc.,
43# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44
45import re, os, bb
46from bb import data
47from bb.fetch2 import FetchMethod
48from bb.fetch2 import FetchError
49from bb.fetch2 import logger
50from bb.fetch2 import runfetchcmd
51
52
53__pattern__ = re.compile(r'''
54 \s* # Skip leading whitespace
55 ssh:// # scheme
56 ( # Optional username/password block
57 (?P<user>\S+) # username
58 (:(?P<pass>\S+))? # colon followed by the password (optional)
59 )?
60 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
61 @
62 (?P<host>\S+?) # non-greedy match of the host
63 (:(?P<port>[0-9]+))? # colon followed by the port (optional)
64 /
65 (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
66 # and may include the use of '~' to reference the remote home
67 # directory
68 (?P<sparam>(;[^;]+)*)? # parameters block (optional)
69 $
70''', re.VERBOSE)
71
72class SSH(FetchMethod):
73 '''Class to fetch a module or modules via Secure Shell'''
74
75 def supports(self, urldata, d):
76 return __pattern__.match(urldata.url) != None
77
78 def supports_checksum(self, urldata):
79 return False
80
81 def urldata_init(self, urldata, d):
82 if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
83 raise bb.fetch2.ParameterError(
84 "Invalid protocol - if you wish to fetch from a git " +
85 "repository using ssh, you need to use " +
86 "git:// prefix with protocol=ssh", urldata.url)
87 m = __pattern__.match(urldata.url)
88 path = m.group('path')
89 host = m.group('host')
90 urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
91
92 def download(self, urldata, d):
93 dldir = d.getVar('DL_DIR', True)
94
95 m = __pattern__.match(urldata.url)
96 path = m.group('path')
97 host = m.group('host')
98 port = m.group('port')
99 user = m.group('user')
100 password = m.group('pass')
101
102 if port:
103 portarg = '-P %s' % port
104 else:
105 portarg = ''
106
107 if user:
108 fr = user
109 if password:
110 fr += ':%s' % password
111 fr += '@%s' % host
112 else:
113 fr = host
114 fr += ':%s' % path
115
116
117 import commands
118 cmd = 'scp -B -r %s %s %s/' % (
119 portarg,
120 commands.mkarg(fr),
121 commands.mkarg(dldir)
122 )
123
124 bb.fetch2.check_network_access(d, cmd, urldata.url)
125
126 runfetchcmd(cmd, d)
127
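For illustration, a standalone check of the URL decomposition performed by __pattern__ above, written with the same regular expression in condensed (non-verbose) form and run against one of the example SRC_URIs from the docstring; it demonstrates only the parsing, not the scp transfer.

import re

pattern = re.compile(
    r'\s*ssh://((?P<user>\S+)(:(?P<pass>\S+))?)?(?P<cparam>(;[^;]+)*)?@'
    r'(?P<host>\S+?)(:(?P<port>[0-9]+))?/(?P<path>[^;]+)(?P<sparam>(;[^;]+)*)?$')

m = pattern.match("ssh://user@host.example.com/~/file.txt")
print("user=%s host=%s path=%s" % (m.group('user'), m.group('host'), m.group('path')))
# -> user=user host=host.example.com path=~/file.txt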
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..1733c2beb6
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,192 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30import re
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
38class Svn(FetchMethod):
39 """Class to fetch a module or modules from svn repositories"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with svn.
43 """
44 return ud.type in ['svn']
45
46 def urldata_init(self, ud, d):
47 """
48 init svn specific variable within url data
49 """
50 if not "module" in ud.parm:
51 raise MissingParameterError('module', ud.url)
52
53 ud.basecmd = d.getVar('FETCHCMD_svn', True)
54
55 ud.module = ud.parm["module"]
56
57 # Create paths to svn checkouts
58 relpath = self._strip_leading_slashes(ud.path)
59 ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
60 ud.moddir = os.path.join(ud.pkgdir, ud.module)
61
62 ud.setup_revisons(d)
63
64 if 'rev' in ud.parm:
65 ud.revision = ud.parm['rev']
66
67 ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
68
69 def _buildsvncommand(self, ud, d, command):
70 """
71 Build up an svn commandline based on ud
72 command is "fetch", "update", "info"
73 """
74
75 proto = ud.parm.get('protocol', 'svn')
76
77 svn_rsh = None
78 if proto == "svn+ssh" and "rsh" in ud.parm:
79 svn_rsh = ud.parm["rsh"]
80
81 svnroot = ud.host + ud.path
82
83 options = []
84
85 options.append("--no-auth-cache")
86
87 if ud.user:
88 options.append("--username %s" % ud.user)
89
90 if ud.pswd:
91 options.append("--password %s" % ud.pswd)
92
93 if command == "info":
94 svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
95 elif command == "log1":
96 svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
97 else:
98 suffix = ""
99 if ud.revision:
100 options.append("-r %s" % ud.revision)
101 suffix = "@%s" % (ud.revision)
102
103 if command == "fetch":
104 transportuser = ud.parm.get("transportuser", "")
105 svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
106 elif command == "update":
107 svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
108 else:
109 raise FetchError("Invalid svn command %s" % command, ud.url)
110
111 if svn_rsh:
112 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
113
114 return svncmd
115
116 def download(self, ud, d):
117 """Fetch url"""
118
119 logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
120
121 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
122 svnupdatecmd = self._buildsvncommand(ud, d, "update")
123 logger.info("Update " + ud.url)
124 # update sources there
125 os.chdir(ud.moddir)
126            # We need to attempt to run "svn upgrade" first in case it's an older working copy format
127 try:
128 runfetchcmd(ud.basecmd + " upgrade", d)
129 except FetchError:
130 pass
131 logger.debug(1, "Running %s", svnupdatecmd)
132 bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
133 runfetchcmd(svnupdatecmd, d)
134 else:
135 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
136 logger.info("Fetch " + ud.url)
137 # check out sources there
138 bb.utils.mkdirhier(ud.pkgdir)
139 os.chdir(ud.pkgdir)
140 logger.debug(1, "Running %s", svnfetchcmd)
141 bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
142 runfetchcmd(svnfetchcmd, d)
143
144 scmdata = ud.parm.get("scmdata", "")
145 if scmdata == "keep":
146 tar_flags = ""
147 else:
148 tar_flags = "--exclude '.svn'"
149
150 os.chdir(ud.pkgdir)
151 # tar them up to a defined filename
152 runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
153
154 def clean(self, ud, d):
155 """ Clean SVN specific files and dirs """
156
157 bb.utils.remove(ud.localpath)
158 bb.utils.remove(ud.moddir, True)
159
160
161 def supports_srcrev(self):
162 return True
163
164 def _revision_key(self, ud, d, name):
165 """
166 Return a unique key for the url
167 """
168 return "svn:" + ud.moddir
169
170 def _latest_revision(self, ud, d, name):
171 """
172 Return the latest upstream revision number
173 """
174 bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))
175
176 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
177
178 # skip the first line, as per output of svn log
179 # then we expect the revision on the 2nd line
180 revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
181
182 return revision
183
184 def sortable_revision(self, ud, d, name):
185 """
186 Return a sortable revision number which in our case is the revision number
187 """
188
189 return False, self._build_revision(ud, d)
190
191 def _build_revision(self, ud, d):
192 return ud.revision
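
For illustration, a standalone sketch of the "fetch" branch of Svn._buildsvncommand() above; host, path, module and revision are invented, and the --no-auth-cache/username/password options and transportuser handling are omitted.

def svn_checkout_cmd(host, path, module, proto="svn", rev=None):
    svnroot = host + path
    options, suffix = [], ""
    if rev:
        options.append("-r %s" % rev)
        suffix = "@%s" % rev          # peg revision appended to the checkout URL
    return "svn co %s %s://%s/%s%s %s" % (" ".join(options), proto, svnroot, module, suffix, module)

print(svn_checkout_cmd("svn.example.com", "/repos", "trunk", rev="1234"))
# -> svn co -r 1234 svn://svn.example.com/repos/trunk@1234 trunk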
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..0456490368
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,106 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch2 import FetchMethod
34from bb.fetch2 import FetchError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
38class Wget(FetchMethod):
39 """Class to fetch urls via 'wget'"""
40 def supports(self, ud, d):
41 """
42 Check to see if a given url can be fetched with wget.
43 """
44 return ud.type in ['http', 'https', 'ftp']
45
46 def recommends_checksum(self, urldata):
47 return True
48
49 def urldata_init(self, ud, d):
50 if 'protocol' in ud.parm:
51 if ud.parm['protocol'] == 'git':
52 raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
53
54 if 'downloadfilename' in ud.parm:
55 ud.basename = ud.parm['downloadfilename']
56 else:
57 ud.basename = os.path.basename(ud.path)
58
59 ud.localfile = data.expand(urllib.unquote(ud.basename), d)
60
61 self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
62
63 def _runwget(self, ud, d, command, quiet):
64
65 logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
66 bb.fetch2.check_network_access(d, command)
67 runfetchcmd(command, d, quiet)
68
69 def download(self, ud, d):
70 """Fetch urls"""
71
72 fetchcmd = self.basecmd
73
74 if 'downloadfilename' in ud.parm:
75 dldir = d.getVar("DL_DIR", True)
76 bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
77 fetchcmd += " -O " + dldir + os.sep + ud.localfile
78
79 uri = ud.url.split(";")[0]
80 if os.path.exists(ud.localpath):
81            # file exists, but we didn't complete it; try resuming the download
82 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
83 else:
84 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
85
86 self._runwget(ud, d, fetchcmd, False)
87
88        # Sanity check since wget can pretend it succeeded when it didn't
89 # Also, this used to happen if sourceforge sent us to the mirror page
90 if not os.path.exists(ud.localpath):
91 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
92
93 if os.path.getsize(ud.localpath) == 0:
94 os.remove(ud.localpath)
95 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
96
97 return True
98
99 def checkstatus(self, ud, d):
100
101 uri = ud.url.split(";")[0]
102 fetchcmd = self.basecmd + " --spider '%s'" % uri
103
104 self._runwget(ud, d, fetchcmd, True)
105
106 return True
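
For illustration, a standalone sketch of the command composition in Wget.download() above; the URL and DL_DIR are invented, and the downloadfilename/-O handling and the post-download sanity checks are left out.

def wget_cmd(url, dl_dir, already_partial=False):
    basecmd = "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
    uri = url.split(";")[0]                      # strip BitBake URL parameters
    if already_partial:
        # a partial file exists locally, so resume with -c
        return "%s -c -P %s '%s'" % (basecmd, dl_dir, uri)
    return "%s -P %s '%s'" % (basecmd, dl_dir, uri)

print(wget_cmd("http://example.com/src.tar.gz;name=src", "/downloads", already_partial=True))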
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
new file mode 100644
index 0000000000..bf2e9f5542
--- /dev/null
+++ b/bitbake/lib/bb/methodpool.py
@@ -0,0 +1,29 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4#
5# Copyright (C) 2006 Holger Hans Peter Freyther
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20from bb.utils import better_compile, better_exec
21
22def insert_method(modulename, code, fn):
23 """
24    Add the code of a module. The methods are simply
25    added; no checking is done.
26 """
27 comp = better_compile(code, modulename, fn )
28 better_exec(comp, None, code, fn)
29
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
new file mode 100644
index 0000000000..466523c6e4
--- /dev/null
+++ b/bitbake/lib/bb/monitordisk.py
@@ -0,0 +1,263 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2012 Robert Yang
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import os, logging, re, sys
21import bb
22logger = logging.getLogger("BitBake.Monitor")
23
24def printErr(info):
25 logger.error("%s\n Disk space monitor will NOT be enabled" % info)
26
27def convertGMK(unit):
28
29 """ Convert the space unit G, M, K, the unit is case-insensitive """
30
31 unitG = re.match('([1-9][0-9]*)[gG]\s?$', unit)
32 if unitG:
33 return int(unitG.group(1)) * (1024 ** 3)
34 unitM = re.match('([1-9][0-9]*)[mM]\s?$', unit)
35 if unitM:
36 return int(unitM.group(1)) * (1024 ** 2)
37 unitK = re.match('([1-9][0-9]*)[kK]\s?$', unit)
38 if unitK:
39 return int(unitK.group(1)) * 1024
40 unitN = re.match('([1-9][0-9]*)\s?$', unit)
41 if unitN:
42 return int(unitN.group(1))
43 else:
44 return None
45
46def getMountedDev(path):
47
48 """ Get the device mounted at the path, uses /proc/mounts """
49
50 # Get the mount point of the filesystem containing path
51 # st_dev is the ID of device containing file
52 parentDev = os.stat(path).st_dev
53 currentDev = parentDev
54 # When the current directory's device is different from the
55 # parent's, then the current directory is a mount point
56 while parentDev == currentDev:
57 mountPoint = path
58 # Use dirname to get the parent's directory
59 path = os.path.dirname(path)
60 # Reach the "/"
61 if path == mountPoint:
62 break
63 parentDev= os.stat(path).st_dev
64
65 try:
66 with open("/proc/mounts", "r") as ifp:
67 for line in ifp:
68 procLines = line.rstrip('\n').split()
69 if procLines[1] == mountPoint:
70 return procLines[0]
71 except EnvironmentError:
72 pass
73 return None
74
75def getDiskData(BBDirs, configuration):
76
77 """Prepare disk data for disk space monitor"""
78
79 # Save the device IDs, need the ID to be unique (the dictionary's key is
80 # unique), so that when more than one directory is located on the same
81 # device, we just monitor it once
82 devDict = {}
83 for pathSpaceInode in BBDirs.split():
84        # The input format is: "action,dir,space,inode"; action and dir
85        # are required, space and inode are optional
86 pathSpaceInodeRe = re.match('([^,]*),([^,]*),([^,]*),?(.*)', pathSpaceInode)
87 if not pathSpaceInodeRe:
88 printErr("Invalid value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
89 return None
90
91 action = pathSpaceInodeRe.group(1)
92 if action not in ("ABORT", "STOPTASKS", "WARN"):
93 printErr("Unknown disk space monitor action: %s" % action)
94 return None
95
96 path = os.path.realpath(pathSpaceInodeRe.group(2))
97 if not path:
98 printErr("Invalid path value in BB_DISKMON_DIRS: %s" % pathSpaceInode)
99 return None
100
101 # The disk space or inode is optional, but it should have a correct
102 # value once it is specified
103 minSpace = pathSpaceInodeRe.group(3)
104 if minSpace:
105 minSpace = convertGMK(minSpace)
106 if not minSpace:
107 printErr("Invalid disk space value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(3))
108 return None
109 else:
110 # None means that it is not specified
111 minSpace = None
112
113 minInode = pathSpaceInodeRe.group(4)
114 if minInode:
115 minInode = convertGMK(minInode)
116 if not minInode:
117 printErr("Invalid inode value in BB_DISKMON_DIRS: %s" % pathSpaceInodeRe.group(4))
118 return None
119 else:
120 # None means that it is not specified
121 minInode = None
122
123 if minSpace is None and minInode is None:
124            printErr("No disk space or inode value found in BB_DISKMON_DIRS: %s" % pathSpaceInode)
125 return None
126 # mkdir for the directory since it may not exist, for example the
127 # DL_DIR may not exist at the very beginning
128 if not os.path.exists(path):
129 bb.utils.mkdirhier(path)
130 dev = getMountedDev(path)
131 # Use path/action as the key
132 devDict[os.path.join(path, action)] = [dev, minSpace, minInode]
133
134 return devDict
135
136def getInterval(configuration):
137
138 """ Get the disk space interval """
139
140 # The default value is 50M and 5K.
141 spaceDefault = 50 * 1024 * 1024
142 inodeDefault = 5 * 1024
143
144 interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True)
145 if not interval:
146 return spaceDefault, inodeDefault
147 else:
148 # The disk space or inode interval is optional, but it should
149 # have a correct value once it is specified
150 intervalRe = re.match('([^,]*),?\s*(.*)', interval)
151 if intervalRe:
152 intervalSpace = intervalRe.group(1)
153 if intervalSpace:
154 intervalSpace = convertGMK(intervalSpace)
155 if not intervalSpace:
156 printErr("Invalid disk space interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(1))
157 return None, None
158 else:
159 intervalSpace = spaceDefault
160 intervalInode = intervalRe.group(2)
161 if intervalInode:
162 intervalInode = convertGMK(intervalInode)
163 if not intervalInode:
164 printErr("Invalid disk inode interval value in BB_DISKMON_WARNINTERVAL: %s" % intervalRe.group(2))
165 return None, None
166 else:
167 intervalInode = inodeDefault
168 return intervalSpace, intervalInode
169 else:
170 printErr("Invalid interval value in BB_DISKMON_WARNINTERVAL: %s" % interval)
171 return None, None
172
173class diskMonitor:
174
175 """Prepare the disk space monitor data"""
176
177 def __init__(self, configuration):
178
179 self.enableMonitor = False
180 self.configuration = configuration
181
182 BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
183 if BBDirs:
184 self.devDict = getDiskData(BBDirs, configuration)
185 if self.devDict:
186 self.spaceInterval, self.inodeInterval = getInterval(configuration)
187 if self.spaceInterval and self.inodeInterval:
188 self.enableMonitor = True
189 # These are for saving the previous disk free space and inode, we
190 # use them to avoid printing too many warning messages
191 self.preFreeS = {}
192 self.preFreeI = {}
193 # This is for STOPTASKS and ABORT, to avoid printing the message
194 # repeatedly while waiting for the tasks to finish
195 self.checked = {}
196 for k in self.devDict:
197 self.preFreeS[k] = 0
198 self.preFreeI[k] = 0
199 self.checked[k] = False
200 if self.spaceInterval is None and self.inodeInterval is None:
201 self.enableMonitor = False
202
203 def check(self, rq):
204
205 """ Take action for the monitor """
206
207 if self.enableMonitor:
208 for k in self.devDict:
209 path = os.path.dirname(k)
210 action = os.path.basename(k)
211 dev = self.devDict[k][0]
212 minSpace = self.devDict[k][1]
213 minInode = self.devDict[k][2]
214
215 st = os.statvfs(path)
216
217                # The free space, floating point number
218 freeSpace = st.f_bavail * st.f_frsize
219
220 if minSpace and freeSpace < minSpace:
221 # Always show warning, the self.checked would always be False if the action is WARN
222 if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
223 logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
224 (path, dev, freeSpace / 1024 / 1024 / 1024.0))
225 self.preFreeS[k] = freeSpace
226
227 if action == "STOPTASKS" and not self.checked[k]:
228 logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
229 self.checked[k] = True
230 rq.finish_runqueue(False)
231 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
232 elif action == "ABORT" and not self.checked[k]:
233 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
234 self.checked[k] = True
235 rq.finish_runqueue(True)
236 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
237
238                # The free inodes, floating point number
239 freeInode = st.f_favail
240
241 if minInode and freeInode < minInode:
242 # Some filesystems use dynamic inodes so can't run out
243 # (e.g. btrfs). This is reported by the inode count being 0.
244 if st.f_files == 0:
245 self.devDict[k][2] = None
246 continue
247 # Always show warning, the self.checked would always be False if the action is WARN
248 if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
249 logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
250 (path, dev, freeInode / 1024.0))
251 self.preFreeI[k] = freeInode
252
253 if action == "STOPTASKS" and not self.checked[k]:
254 logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
255 self.checked[k] = True
256 rq.finish_runqueue(False)
257 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
258 elif action == "ABORT" and not self.checked[k]:
259 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
260 self.checked[k] = True
261 rq.finish_runqueue(True)
262 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
263 return
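For illustration, a condensed standalone version of convertGMK() above (the four per-unit regexes folded into one), together with a sample BB_DISKMON_DIRS entry showing the "action,dir,space,inode" format that getDiskData() parses; the directories and thresholds are invented.

import re

def convertGMK(unit):
    # Convert a G/M/K-suffixed size (case-insensitive) to bytes; bare numbers pass through
    m = re.match(r'([1-9][0-9]*)([gGmMkK]?)\s?$', unit)
    if not m:
        return None
    factor = {'g': 1024 ** 3, 'm': 1024 ** 2, 'k': 1024, '': 1}[m.group(2).lower()]
    return int(m.group(1)) * factor

# BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},1G,100K ABORT,${DL_DIR},512M,"
print(convertGMK("1G"))    # 1073741824
print(convertGMK("100K"))  # 102400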
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
new file mode 100644
index 0000000000..d79768db24
--- /dev/null
+++ b/bitbake/lib/bb/msg.py
@@ -0,0 +1,196 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'msg' implementation
5
6Message handling infrastructure for bitbake
7
8"""
9
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import sys
26import copy
27import logging
28import collections
29from itertools import groupby
30import warnings
31import bb
32import bb.event
33
34class BBLogFormatter(logging.Formatter):
35 """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
36
37 DEBUG3 = logging.DEBUG - 2
38 DEBUG2 = logging.DEBUG - 1
39 DEBUG = logging.DEBUG
40 VERBOSE = logging.INFO - 1
41 NOTE = logging.INFO
42 PLAIN = logging.INFO + 1
43 ERROR = logging.ERROR
44 WARNING = logging.WARNING
45 CRITICAL = logging.CRITICAL
46
47 levelnames = {
48 DEBUG3 : 'DEBUG',
49 DEBUG2 : 'DEBUG',
50 DEBUG : 'DEBUG',
51 VERBOSE: 'NOTE',
52 NOTE : 'NOTE',
53 PLAIN : '',
54 WARNING : 'WARNING',
55 ERROR : 'ERROR',
56 CRITICAL: 'ERROR',
57 }
58
59 color_enabled = False
60 BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)
61
62 COLORS = {
63 DEBUG3 : CYAN,
64 DEBUG2 : CYAN,
65 DEBUG : CYAN,
66 VERBOSE : BASECOLOR,
67 NOTE : BASECOLOR,
68 PLAIN : BASECOLOR,
69 WARNING : YELLOW,
70 ERROR : RED,
71 CRITICAL: RED,
72 }
73
74 BLD = '\033[1;%dm'
75 STD = '\033[%dm'
76 RST = '\033[0m'
77
78 def getLevelName(self, levelno):
79 try:
80 return self.levelnames[levelno]
81 except KeyError:
82 self.levelnames[levelno] = value = 'Level %d' % levelno
83 return value
84
85 def format(self, record):
86 record.levelname = self.getLevelName(record.levelno)
87 if record.levelno == self.PLAIN:
88 msg = record.getMessage()
89 else:
90 if self.color_enabled:
91 record = self.colorize(record)
92 msg = logging.Formatter.format(self, record)
93
94 if hasattr(record, 'bb_exc_info'):
95 etype, value, tb = record.bb_exc_info
96 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
97 msg += '\n' + ''.join(formatted)
98 return msg
99
100 def colorize(self, record):
101 color = self.COLORS[record.levelno]
102 if self.color_enabled and color is not None:
103 record = copy.copy(record)
104 record.levelname = "".join([self.BLD % color, record.levelname, self.RST])
105 record.msg = "".join([self.STD % color, record.msg, self.RST])
106 return record
107
108 def enable_color(self):
109 self.color_enabled = True
110
111class BBLogFilter(object):
112 def __init__(self, handler, level, debug_domains):
113 self.stdlevel = level
114 self.debug_domains = debug_domains
115 loglevel = level
116 for domain in debug_domains:
117 if debug_domains[domain] < loglevel:
118 loglevel = debug_domains[domain]
119 handler.setLevel(loglevel)
120 handler.addFilter(self)
121
122 def filter(self, record):
123 if record.levelno >= self.stdlevel:
124 return True
125 if record.name in self.debug_domains and record.levelno >= self.debug_domains[record.name]:
126 return True
127 return False
128
129class BBLogFilterStdErr(BBLogFilter):
130 def filter(self, record):
131 if not BBLogFilter.filter(self, record):
132 return False
133 if record.levelno >= logging.ERROR:
134 return True
135 return False
136
137class BBLogFilterStdOut(BBLogFilter):
138 def filter(self, record):
139 if not BBLogFilter.filter(self, record):
140 return False
141 if record.levelno < logging.ERROR:
142 return True
143 return False
144
145# Message control functions
146#
147
148loggerDefaultDebugLevel = 0
149loggerDefaultVerbose = False
150loggerVerboseLogs = False
151loggerDefaultDomains = []
152
153def init_msgconfig(verbose, debug, debug_domains = []):
154 """
155    Set the default verbosity and debug levels used to configure the logger
156 """
157 bb.msg.loggerDefaultDebugLevel = debug
158 bb.msg.loggerDefaultVerbose = verbose
159 if verbose:
160 bb.msg.loggerVerboseLogs = True
161 bb.msg.loggerDefaultDomains = debug_domains
162
163def constructLogOptions():
164 debug = loggerDefaultDebugLevel
165 verbose = loggerDefaultVerbose
166 domains = loggerDefaultDomains
167
168 if debug:
169 level = BBLogFormatter.DEBUG - debug + 1
170 elif verbose:
171 level = BBLogFormatter.VERBOSE
172 else:
173 level = BBLogFormatter.NOTE
174
175 debug_domains = {}
176 for (domainarg, iterator) in groupby(domains):
177 dlevel = len(tuple(iterator))
178 debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
179 return level, debug_domains
180
181def addDefaultlogFilter(handler, cls = BBLogFilter):
182 level, debug_domains = constructLogOptions()
183
184 cls(handler, level, debug_domains)
185
186#
187# Message handling functions
188#
189
190def fatal(msgdomain, msg):
191 if msgdomain:
192 logger = logging.getLogger("BitBake.%s" % msgdomain)
193 else:
194 logger = logging.getLogger("BitBake")
195 logger.critical(msg)
196 sys.exit(1)
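For illustration, a standalone sketch of the debug-domain handling in constructLogOptions() above: each extra occurrence of a domain name lowers that domain's logging threshold by one level. The domain names are invented; note that itertools.groupby() only merges adjacent repeats, exactly as in the original.

import logging
from itertools import groupby

def domain_levels(domains):
    levels = {}
    for domain, occurrences in groupby(domains):
        dlevel = len(tuple(occurrences))
        levels["BitBake.%s" % domain] = logging.DEBUG - dlevel + 1
    return levels

print(domain_levels(["Fetcher", "Fetcher", "RunQueue"]))
# -> {'BitBake.Fetcher': 9, 'BitBake.RunQueue': 10}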
diff --git a/bitbake/lib/bb/namedtuple_with_abc.py b/bitbake/lib/bb/namedtuple_with_abc.py
new file mode 100644
index 0000000000..32f2fc642c
--- /dev/null
+++ b/bitbake/lib/bb/namedtuple_with_abc.py
@@ -0,0 +1,255 @@
1# http://code.activestate.com/recipes/577629-namedtupleabc-abstract-base-class-mix-in-for-named/
2#!/usr/bin/env python
3# Copyright (c) 2011 Jan Kaliszewski (zuo). Available under the MIT License.
4
5"""
6namedtuple_with_abc.py:
7* named tuple mix-in + ABC (abstract base class) recipe,
8* works under Python 2.6, 2.7 as well as 3.x.
9
10Import this module to patch collections.namedtuple() factory function
11-- enriching it with the 'abc' attribute (an abstract base class + mix-in
12for named tuples) and decorating it with a wrapper that registers each
13newly created named tuple as a subclass of namedtuple.abc.
14
15How to import:
16 import collections, namedtuple_with_abc
17or:
18 import namedtuple_with_abc
19 from collections import namedtuple
20 # ^ in this variant you must import namedtuple function
21 # *after* importing namedtuple_with_abc module
22or simply:
23 from namedtuple_with_abc import namedtuple
24
25Simple usage example:
26 class Credentials(namedtuple.abc):
27 _fields = 'username password'
28 def __str__(self):
29 return ('{0.__class__.__name__}'
30 '(username={0.username}, password=...)'.format(self))
31 print(Credentials("alice", "Alice's password"))
32
33For more advanced examples -- see below, after "if __name__ == '__main__':".
34"""
35
36import collections
37from abc import ABCMeta, abstractproperty
38from functools import wraps
39from sys import version_info
40
41__all__ = ('namedtuple',)
42_namedtuple = collections.namedtuple
43
44
45class _NamedTupleABCMeta(ABCMeta):
46 '''The metaclass for the abstract base class + mix-in for named tuples.'''
47 def __new__(mcls, name, bases, namespace):
48 fields = namespace.get('_fields')
49 for base in bases:
50 if fields is not None:
51 break
52 fields = getattr(base, '_fields', None)
53 if not isinstance(fields, abstractproperty):
54 basetuple = _namedtuple(name, fields)
55 bases = (basetuple,) + bases
56 namespace.pop('_fields', None)
57 namespace.setdefault('__doc__', basetuple.__doc__)
58 namespace.setdefault('__slots__', ())
59 return ABCMeta.__new__(mcls, name, bases, namespace)
60
61
62exec(
63 # Python 2.x metaclass declaration syntax
64 """class _NamedTupleABC(object):
65 '''The abstract base class + mix-in for named tuples.'''
66 __metaclass__ = _NamedTupleABCMeta
67 _fields = abstractproperty()""" if version_info[0] < 3 else
68 # Python 3.x metaclass declaration syntax
69 """class _NamedTupleABC(metaclass=_NamedTupleABCMeta):
70 '''The abstract base class + mix-in for named tuples.'''
71 _fields = abstractproperty()"""
72)
73
74
75_namedtuple.abc = _NamedTupleABC
76#_NamedTupleABC.register(type(version_info)) # (and similar, in the future...)
77
78@wraps(_namedtuple)
79def namedtuple(*args, **kwargs):
80 '''Named tuple factory with namedtuple.abc subclass registration.'''
81 cls = _namedtuple(*args, **kwargs)
82 _NamedTupleABC.register(cls)
83 return cls
84
85collections.namedtuple = namedtuple
86
87
88
89
90if __name__ == '__main__':
91
92 '''Examples and explanations'''
93
94 # Simple usage
95
96 class MyRecord(namedtuple.abc):
97 _fields = 'x y z' # such form will be transformed into ('x', 'y', 'z')
98 def _my_custom_method(self):
99 return list(self._asdict().items())
100 # (the '_fields' attribute belongs to the named tuple public API anyway)
101
102 rec = MyRecord(1, 2, 3)
103 print(rec)
104 print(rec._my_custom_method())
105 print(rec._replace(y=222))
106 print(rec._replace(y=222)._my_custom_method())
107
108 # Custom abstract classes...
109
110 class MyAbstractRecord(namedtuple.abc):
111 def _my_custom_method(self):
112 return list(self._asdict().items())
113
114 try:
115 MyAbstractRecord() # (abstract classes cannot be instantiated)
116 except TypeError as exc:
117 print(exc)
118
119 class AnotherAbstractRecord(MyAbstractRecord):
120 def __str__(self):
121 return '<<<{0}>>>'.format(super(AnotherAbstractRecord,
122 self).__str__())
123
124 # ...and their non-abstract subclasses
125
126 class MyRecord2(MyAbstractRecord):
127 _fields = 'a, b'
128
129 class MyRecord3(AnotherAbstractRecord):
130 _fields = 'p', 'q', 'r'
131
132 rec2 = MyRecord2('foo', 'bar')
133 print(rec2)
134 print(rec2._my_custom_method())
135 print(rec2._replace(b=222))
136 print(rec2._replace(b=222)._my_custom_method())
137
138 rec3 = MyRecord3('foo', 'bar', 'baz')
139 print(rec3)
140 print(rec3._my_custom_method())
141 print(rec3._replace(q=222))
142 print(rec3._replace(q=222)._my_custom_method())
143
144 # You can also subclass non-abstract ones...
145
146 class MyRecord33(MyRecord3):
147 def __str__(self):
148            return '< {0!r}, ..., {1!r} >'.format(self.p, self.r)
149
150 rec33 = MyRecord33('foo', 'bar', 'baz')
151 print(rec33)
152 print(rec33._my_custom_method())
153 print(rec33._replace(q=222))
154 print(rec33._replace(q=222)._my_custom_method())
155
156 # ...and even override the magic '_fields' attribute again
157
158 class MyRecord345(MyRecord3):
159 _fields = 'e f g h i j k'
160
161 rec345 = MyRecord345(1, 2, 3, 4, 3, 2, 1)
162 print(rec345)
163 print(rec345._my_custom_method())
164 print(rec345._replace(f=222))
165 print(rec345._replace(f=222)._my_custom_method())
166
167 # Mixing-in some other classes is also possible:
168
169 class MyMixIn(object):
170 def method(self):
171 return "MyMixIn.method() called"
172 def _my_custom_method(self):
173 return "MyMixIn._my_custom_method() called"
174 def count(self, item):
175 return "MyMixIn.count({0}) called".format(item)
176 def _asdict(self): # (cannot override a namedtuple method, see below)
177 return "MyMixIn._asdict() called"
178
179 class MyRecord4(MyRecord33, MyMixIn): # mix-in on the right
180 _fields = 'j k l x'
181
182 class MyRecord5(MyMixIn, MyRecord33): # mix-in on the left
183 _fields = 'j k l x y'
184
185 rec4 = MyRecord4(1, 2, 3, 2)
186 print(rec4)
187 print(rec4.method())
188 print(rec4._my_custom_method()) # MyRecord33's
189 print(rec4.count(2)) # tuple's
190 print(rec4._replace(k=222))
191 print(rec4._replace(k=222).method())
192 print(rec4._replace(k=222)._my_custom_method()) # MyRecord33's
193 print(rec4._replace(k=222).count(8)) # tuple's
194
195 rec5 = MyRecord5(1, 2, 3, 2, 1)
196 print(rec5)
197 print(rec5.method())
198 print(rec5._my_custom_method()) # MyMixIn's
199 print(rec5.count(2)) # MyMixIn's
200 print(rec5._replace(k=222))
201 print(rec5._replace(k=222).method())
202 print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
203 print(rec5._replace(k=222).count(2)) # MyMixIn's
204
205    # Note this behavior: the standard namedtuple methods cannot be
206    # overridden by a foreign mix-in -- even if the mix-in is declared
207    # as the leftmost base class (but, obviously, you can override them
208    # in the defined class or its subclasses):
209
210 print(rec4._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
211 print(rec5._asdict()) # (returns a dict, not "MyMixIn._asdict() called")
212
213 class MyRecord6(MyRecord33):
214 _fields = 'j k l x y z'
215 def _asdict(self):
216 return "MyRecord6._asdict() called"
217 rec6 = MyRecord6(1, 2, 3, 1, 2, 3)
218 print(rec6._asdict()) # (this returns "MyRecord6._asdict() called")
219
220 # All that record classes are real subclasses of namedtuple.abc:
221
222 assert issubclass(MyRecord, namedtuple.abc)
223 assert issubclass(MyAbstractRecord, namedtuple.abc)
224 assert issubclass(AnotherAbstractRecord, namedtuple.abc)
225 assert issubclass(MyRecord2, namedtuple.abc)
226 assert issubclass(MyRecord3, namedtuple.abc)
227 assert issubclass(MyRecord33, namedtuple.abc)
228 assert issubclass(MyRecord345, namedtuple.abc)
229 assert issubclass(MyRecord4, namedtuple.abc)
230 assert issubclass(MyRecord5, namedtuple.abc)
231 assert issubclass(MyRecord6, namedtuple.abc)
232
233 # ...but abstract ones are not subclasses of tuple
234 # (and this is what you probably want):
235
236 assert not issubclass(MyAbstractRecord, tuple)
237 assert not issubclass(AnotherAbstractRecord, tuple)
238
239 assert issubclass(MyRecord, tuple)
240 assert issubclass(MyRecord2, tuple)
241 assert issubclass(MyRecord3, tuple)
242 assert issubclass(MyRecord33, tuple)
243 assert issubclass(MyRecord345, tuple)
244 assert issubclass(MyRecord4, tuple)
245 assert issubclass(MyRecord5, tuple)
246 assert issubclass(MyRecord6, tuple)
247
248 # Named tuple classes created with namedtuple() factory function
249 # (in the "traditional" way) are registered as "virtual" subclasses
250 # of namedtuple.abc:
251
252 MyTuple = namedtuple('MyTuple', 'a b c')
253 mt = MyTuple(1, 2, 3)
254 assert issubclass(MyTuple, namedtuple.abc)
255 assert isinstance(mt, namedtuple.abc)
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
new file mode 100644
index 0000000000..25effc2200
--- /dev/null
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -0,0 +1,164 @@
1"""
2BitBake Parsers
3
4File parsers for the BitBake build tools.
5
6"""
7
8
9# Copyright (C) 2003, 2004 Chris Larson
10# Copyright (C) 2003, 2004 Phil Blundell
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27handlers = []
28
29import os
30import stat
31import logging
32import bb
33import bb.utils
34import bb.siggen
35
36logger = logging.getLogger("BitBake.Parsing")
37
38class ParseError(Exception):
39 """Exception raised when parsing fails"""
40 def __init__(self, msg, filename, lineno=0):
41 self.msg = msg
42 self.filename = filename
43 self.lineno = lineno
44 Exception.__init__(self, msg, filename, lineno)
45
46 def __str__(self):
47 if self.lineno:
48 return "ParseError at %s:%d: %s" % (self.filename, self.lineno, self.msg)
49 else:
50 return "ParseError in %s: %s" % (self.filename, self.msg)
51
52class SkipRecipe(Exception):
53 """Exception raised to skip this recipe"""
54
55class SkipPackage(SkipRecipe):
56 """Exception raised to skip this recipe (use SkipRecipe in new code)"""
57
58__mtime_cache = {}
59def cached_mtime(f):
60 if f not in __mtime_cache:
61 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
62 return __mtime_cache[f]
63
64def cached_mtime_noerror(f):
65 if f not in __mtime_cache:
66 try:
67 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
68 except OSError:
69 return 0
70 return __mtime_cache[f]
71
72def update_mtime(f):
73 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
74 return __mtime_cache[f]
75
76def update_cache(f):
77 if f in __mtime_cache:
78 logger.debug(1, "Updating mtime cache for %s" % f)
79 update_mtime(f)
80
81def mark_dependency(d, f):
82 if f.startswith('./'):
83 f = "%s/%s" % (os.getcwd(), f[2:])
84 deps = (d.getVar('__depends') or [])
85 s = (f, cached_mtime_noerror(f))
86 if s not in deps:
87 deps.append(s)
88 d.setVar('__depends', deps)
89
90def check_dependency(d, f):
91 s = (f, cached_mtime_noerror(f))
92 deps = (d.getVar('__depends') or [])
93 return s in deps
94
95def supports(fn, data):
96 """Returns true if we have a handler for this file, false otherwise"""
97 for h in handlers:
98 if h['supports'](fn, data):
99 return 1
100 return 0
101
102def handle(fn, data, include = 0):
103 """Call the handler that is appropriate for this file"""
104 for h in handlers:
105 if h['supports'](fn, data):
106 with data.inchistory.include(fn):
107 return h['handle'](fn, data, include)
108 raise ParseError("not a BitBake file", fn)
109
110def init(fn, data):
111 for h in handlers:
112 if h['supports'](fn):
113 return h['init'](data)
114
115def init_parser(d):
116 bb.parse.siggen = bb.siggen.init(d)
117
118def resolve_file(fn, d):
119 if not os.path.isabs(fn):
120 bbpath = d.getVar("BBPATH", True)
121 newfn, attempts = bb.utils.which(bbpath, fn, history=True)
122 for af in attempts:
123 mark_dependency(d, af)
124 if not newfn:
125 raise IOError("file %s not found in %s" % (fn, bbpath))
126 fn = newfn
127
128 mark_dependency(d, fn)
129 if not os.path.isfile(fn):
130 raise IOError("file %s not found" % fn)
131
132 return fn
133
134# Used by OpenEmbedded metadata
135__pkgsplit_cache__={}
136def vars_from_file(mypkg, d):
137 if not mypkg or not mypkg.endswith((".bb", ".bbappend")):
138 return (None, None, None)
139 if mypkg in __pkgsplit_cache__:
140 return __pkgsplit_cache__[mypkg]
141
142 myfile = os.path.splitext(os.path.basename(mypkg))
143 parts = myfile[0].split('_')
144 __pkgsplit_cache__[mypkg] = parts
145 if len(parts) > 3:
146 raise ParseError("Unable to generate default variables from filename (too many underscores)", mypkg)
147 exp = 3 - len(parts)
148 tmplist = []
149 while exp != 0:
150 exp -= 1
151 tmplist.append(None)
152 parts.extend(tmplist)
153 return parts
154
155def get_file_depends(d):
156 '''Return the dependent files'''
157 dep_files = []
158 depends = d.getVar('__base_depends', True) or []
159 depends = depends + (d.getVar('__depends', True) or [])
160 for (fn, _) in depends:
161 dep_files.append(os.path.abspath(fn))
162 return " ".join(dep_files)
163
164from bb.parse.parse_py import __version__, ConfHandler, BBHandler
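
As a quick illustration of the filename convention that vars_from_file() encodes above, here is a standalone mirror of its splitting logic (the sample filenames are hypothetical; the real function also caches its results and takes a datastore argument that is not used for the split itself):

import os

def split_recipe_filename(path):
    # "<pn>_<pv>_<pr>.bb" -> [pn, pv, pr], padded with None for missing parts
    parts = os.path.splitext(os.path.basename(path))[0].split('_')
    if len(parts) > 3:
        raise ValueError("too many underscores in %s" % path)
    return parts + [None] * (3 - len(parts))

print(split_recipe_filename("busybox_1.22.1.bb"))   # ['busybox', '1.22.1', None]
print(split_recipe_filename("foo_1.0_r0.bb"))       # ['foo', '1.0', 'r0']
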
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
new file mode 100644
index 0000000000..4b10ee7013
--- /dev/null
+++ b/bitbake/lib/bb/parse/ast.py
@@ -0,0 +1,482 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4 AbstractSyntaxTree classes for the Bitbake language
5"""
6
7# Copyright (C) 2003, 2004 Chris Larson
8# Copyright (C) 2003, 2004 Phil Blundell
9# Copyright (C) 2009 Holger Hans Peter Freyther
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24from __future__ import absolute_import
25from future_builtins import filter
26import re
27import string
28import logging
29import bb
30import itertools
31from bb import methodpool
32from bb.parse import logger
33
34_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
35
36class StatementGroup(list):
37 def eval(self, data):
38 for statement in self:
39 statement.eval(data)
40
41class AstNode(object):
42 def __init__(self, filename, lineno):
43 self.filename = filename
44 self.lineno = lineno
45
46class IncludeNode(AstNode):
47 def __init__(self, filename, lineno, what_file, force):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50 self.force = force
51
52 def eval(self, data):
53 """
54 Include the file and evaluate the statements
55 """
56 s = data.expand(self.what_file)
57 logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
58
59 # TODO: Cache those includes... maybe not here though
60 if self.force:
61 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, "include required")
62 else:
63 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
64
65class ExportNode(AstNode):
66 def __init__(self, filename, lineno, var):
67 AstNode.__init__(self, filename, lineno)
68 self.var = var
69
70 def eval(self, data):
71 data.setVarFlag(self.var, "export", 1, op = 'exported')
72
73class DataNode(AstNode):
74 """
75    Various data-related updates. For the sake of sanity
76    we have one class doing all of this. This means that all
77    of it needs to be re-evaluated... we might be able to do
78    that faster with multiple classes.
79 """
80 def __init__(self, filename, lineno, groupd):
81 AstNode.__init__(self, filename, lineno)
82 self.groupd = groupd
83
84 def getFunc(self, key, data):
85 if 'flag' in self.groupd and self.groupd['flag'] != None:
86 return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
87 else:
88 return data.getVar(key, noweakdefault=True)
89
90 def eval(self, data):
91 groupd = self.groupd
92 key = groupd["var"]
93 loginfo = {
94 'variable': key,
95 'file': self.filename,
96 'line': self.lineno,
97 }
98 if "exp" in groupd and groupd["exp"] != None:
99 data.setVarFlag(key, "export", 1, op = 'exported', **loginfo)
100
101 op = "set"
102 if "ques" in groupd and groupd["ques"] != None:
103 val = self.getFunc(key, data)
104 op = "set?"
105 if val == None:
106 val = groupd["value"]
107 elif "colon" in groupd and groupd["colon"] != None:
108 e = data.createCopy()
109 bb.data.update_data(e)
110 op = "immediate"
111 val = e.expand(groupd["value"], key + "[:=]")
112 elif "append" in groupd and groupd["append"] != None:
113 op = "append"
114 val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
115 elif "prepend" in groupd and groupd["prepend"] != None:
116 op = "prepend"
117 val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
118 elif "postdot" in groupd and groupd["postdot"] != None:
119 op = "postdot"
120 val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
121 elif "predot" in groupd and groupd["predot"] != None:
122 op = "predot"
123 val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
124 else:
125 val = groupd["value"]
126
127 flag = None
128 if 'flag' in groupd and groupd['flag'] != None:
129 flag = groupd['flag']
130 elif groupd["lazyques"]:
131 flag = "defaultval"
132
133 loginfo['op'] = op
134 loginfo['detail'] = groupd["value"]
135
136 if flag:
137 data.setVarFlag(key, flag, val, **loginfo)
138 else:
139 data.setVar(key, val, **loginfo)
140
141class MethodNode(AstNode):
142 tr_tbl = string.maketrans('/.+-@%', '______')
143
144 def __init__(self, filename, lineno, func_name, body):
145 AstNode.__init__(self, filename, lineno)
146 self.func_name = func_name
147 self.body = body
148
149 def eval(self, data):
150 text = '\n'.join(self.body)
151 if self.func_name == "__anonymous":
152 funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
153 text = "def %s(d):\n" % (funcname) + text
154 bb.methodpool.insert_method(funcname, text, self.filename)
155 anonfuncs = data.getVar('__BBANONFUNCS') or []
156 anonfuncs.append(funcname)
157 data.setVar('__BBANONFUNCS', anonfuncs)
158 data.setVar(funcname, text)
159 else:
160 data.setVarFlag(self.func_name, "func", 1)
161 data.setVar(self.func_name, text)
162
163class PythonMethodNode(AstNode):
164 def __init__(self, filename, lineno, function, modulename, body):
165 AstNode.__init__(self, filename, lineno)
166 self.function = function
167 self.modulename = modulename
168 self.body = body
169
170 def eval(self, data):
171        # Note we will add root to parsedmethods after having parsed
172        # 'this' file. This means we will not parse methods from
173        # bb classes twice.
174 text = '\n'.join(self.body)
175 bb.methodpool.insert_method(self.modulename, text, self.filename)
176 data.setVarFlag(self.function, "func", 1)
177 data.setVarFlag(self.function, "python", 1)
178 data.setVar(self.function, text)
179
180class MethodFlagsNode(AstNode):
181 def __init__(self, filename, lineno, key, m):
182 AstNode.__init__(self, filename, lineno)
183 self.key = key
184 self.m = m
185
186 def eval(self, data):
187 if data.getVar(self.key):
188 # clean up old version of this piece of metadata, as its
189 # flags could cause problems
190 data.setVarFlag(self.key, 'python', None)
191 data.setVarFlag(self.key, 'fakeroot', None)
192 if self.m.group("py") is not None:
193 data.setVarFlag(self.key, "python", "1")
194 else:
195 data.delVarFlag(self.key, "python")
196 if self.m.group("fr") is not None:
197 data.setVarFlag(self.key, "fakeroot", "1")
198 else:
199 data.delVarFlag(self.key, "fakeroot")
200
201class ExportFuncsNode(AstNode):
202 def __init__(self, filename, lineno, fns, classname):
203 AstNode.__init__(self, filename, lineno)
204 self.n = fns.split()
205 self.classname = classname
206
207 def eval(self, data):
208
209 for func in self.n:
210 calledfunc = self.classname + "_" + func
211
212 if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
213 continue
214
215 if data.getVar(func):
216 data.setVarFlag(func, 'python', None)
217 data.setVarFlag(func, 'func', None)
218
219 for flag in [ "func", "python" ]:
220 if data.getVarFlag(calledfunc, flag):
221 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag))
222 for flag in [ "dirs" ]:
223 if data.getVarFlag(func, flag):
224 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))
225
226 if data.getVarFlag(calledfunc, "python"):
227 data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n")
228 else:
229 if "-" in self.classname:
230 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
231 data.setVar(func, " " + calledfunc + "\n")
232 data.setVarFlag(func, 'export_func', '1')
233
234class AddTaskNode(AstNode):
235 def __init__(self, filename, lineno, func, before, after):
236 AstNode.__init__(self, filename, lineno)
237 self.func = func
238 self.before = before
239 self.after = after
240
241 def eval(self, data):
242 bb.build.addtask(self.func, self.before, self.after, data)
243
244class DelTaskNode(AstNode):
245 def __init__(self, filename, lineno, func):
246 AstNode.__init__(self, filename, lineno)
247 self.func = func
248
249 def eval(self, data):
250 bb.build.deltask(self.func, data)
251
252class BBHandlerNode(AstNode):
253 def __init__(self, filename, lineno, fns):
254 AstNode.__init__(self, filename, lineno)
255 self.hs = fns.split()
256
257 def eval(self, data):
258 bbhands = data.getVar('__BBHANDLERS') or []
259 for h in self.hs:
260 bbhands.append(h)
261 data.setVarFlag(h, "handler", 1)
262 data.setVar('__BBHANDLERS', bbhands)
263
264class InheritNode(AstNode):
265 def __init__(self, filename, lineno, classes):
266 AstNode.__init__(self, filename, lineno)
267 self.classes = classes
268
269 def eval(self, data):
270 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
271
272def handleInclude(statements, filename, lineno, m, force):
273 statements.append(IncludeNode(filename, lineno, m.group(1), force))
274
275def handleExport(statements, filename, lineno, m):
276 statements.append(ExportNode(filename, lineno, m.group(1)))
277
278def handleData(statements, filename, lineno, groupd):
279 statements.append(DataNode(filename, lineno, groupd))
280
281def handleMethod(statements, filename, lineno, func_name, body):
282 statements.append(MethodNode(filename, lineno, func_name, body))
283
284def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
285 statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))
286
287def handleMethodFlags(statements, filename, lineno, key, m):
288 statements.append(MethodFlagsNode(filename, lineno, key, m))
289
290def handleExportFuncs(statements, filename, lineno, m, classname):
291 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
292
293def handleAddTask(statements, filename, lineno, m):
294 func = m.group("func")
295 before = m.group("before")
296 after = m.group("after")
297 if func is None:
298 return
299
300 statements.append(AddTaskNode(filename, lineno, func, before, after))
301
302def handleDelTask(statements, filename, lineno, m):
303 func = m.group("func")
304 if func is None:
305 return
306
307 statements.append(DelTaskNode(filename, lineno, func))
308
309def handleBBHandlers(statements, filename, lineno, m):
310 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
311
312def handleInherit(statements, filename, lineno, m):
313 classes = m.group(1)
314 statements.append(InheritNode(filename, lineno, classes))
315
316def finalize(fn, d, variant = None):
317 all_handlers = {}
318 for var in d.getVar('__BBHANDLERS') or []:
319 # try to add the handler
320 bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())
321
322 bb.event.fire(bb.event.RecipePreFinalise(fn), d)
323
324 bb.data.expandKeys(d)
325 bb.data.update_data(d)
326 code = []
327 for funcname in d.getVar("__BBANONFUNCS") or []:
328 code.append("%s(d)" % funcname)
329 bb.utils.better_exec("\n".join(code), {"d": d})
330 bb.data.update_data(d)
331
332 tasklist = d.getVar('__BBTASKS') or []
333 deltasklist = d.getVar('__BBDELTASKS') or []
334 bb.build.add_tasks(tasklist, deltasklist, d)
335
336 bb.parse.siggen.finalise(fn, d, variant)
337
338 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
339
340 bb.event.fire(bb.event.RecipeParsed(fn), d)
341
342def _create_variants(datastores, names, function, onlyfinalise):
343 def create_variant(name, orig_d, arg = None):
344 if onlyfinalise and name not in onlyfinalise:
345 return
346 new_d = bb.data.createCopy(orig_d)
347 function(arg or name, new_d)
348 datastores[name] = new_d
349
350 for variant, variant_d in datastores.items():
351 for name in names:
352 if not variant:
353 # Based on main recipe
354 create_variant(name, variant_d)
355 else:
356 create_variant("%s-%s" % (variant, name), variant_d, name)
357
358def _expand_versions(versions):
359 def expand_one(version, start, end):
360 for i in xrange(start, end + 1):
361 ver = _bbversions_re.sub(str(i), version, 1)
362 yield ver
363
364 versions = iter(versions)
365 while True:
366 try:
367 version = next(versions)
368 except StopIteration:
369 break
370
371 range_ver = _bbversions_re.search(version)
372 if not range_ver:
373 yield version
374 else:
375 newversions = expand_one(version, int(range_ver.group("from")),
376 int(range_ver.group("to")))
377 versions = itertools.chain(newversions, versions)
378
379def multi_finalize(fn, d):
380 appends = (d.getVar("__BBAPPEND", True) or "").split()
381 for append in appends:
382 logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
383 bb.parse.BBHandler.handle(append, d, True)
384
385 onlyfinalise = d.getVar("__ONLYFINALISE", False)
386
387 safe_d = d
388 d = bb.data.createCopy(safe_d)
389 try:
390 finalize(fn, d)
391 except bb.parse.SkipRecipe as e:
392 d.setVar("__SKIPPED", e.args[0])
393 datastores = {"": safe_d}
394
395 versions = (d.getVar("BBVERSIONS", True) or "").split()
396 if versions:
397 pv = orig_pv = d.getVar("PV", True)
398 baseversions = {}
399
400 def verfunc(ver, d, pv_d = None):
401 if pv_d is None:
402 pv_d = d
403
404 overrides = d.getVar("OVERRIDES", True).split(":")
405 pv_d.setVar("PV", ver)
406 overrides.append(ver)
407 bpv = baseversions.get(ver) or orig_pv
408 pv_d.setVar("BPV", bpv)
409 overrides.append(bpv)
410 d.setVar("OVERRIDES", ":".join(overrides))
411
412 versions = list(_expand_versions(versions))
413 for pos, version in enumerate(list(versions)):
414 try:
415 pv, bpv = version.split(":", 2)
416 except ValueError:
417 pass
418 else:
419 versions[pos] = pv
420 baseversions[pv] = bpv
421
422 if pv in versions and not baseversions.get(pv):
423 versions.remove(pv)
424 else:
425 pv = versions.pop()
426
427            # This is necessary because our existing main datastore
428            # has already been finalized with the old PV, so we need one
429            # that's been finalized with the new PV.
430 d = bb.data.createCopy(safe_d)
431 verfunc(pv, d, safe_d)
432 try:
433 finalize(fn, d)
434 except bb.parse.SkipRecipe as e:
435 d.setVar("__SKIPPED", e.args[0])
436
437 _create_variants(datastores, versions, verfunc, onlyfinalise)
438
439 extended = d.getVar("BBCLASSEXTEND", True) or ""
440 if extended:
441        # The following supports bbextends with arguments, e.g. for multilib.
442        # An example is as follows:
443        # BBCLASSEXTEND = "multilib:lib32"
444        # This will create foo-lib32, inheriting multilib.bbclass and setting
445        # BBEXTENDCURR to "multilib" and BBEXTENDVARIANT to "lib32".
446 extendedmap = {}
447 variantmap = {}
448
449 for ext in extended.split():
450 eext = ext.split(':', 2)
451 if len(eext) > 1:
452 extendedmap[ext] = eext[0]
453 variantmap[ext] = eext[1]
454 else:
455 extendedmap[ext] = ext
456
457 pn = d.getVar("PN", True)
458 def extendfunc(name, d):
459 if name != extendedmap[name]:
460 d.setVar("BBEXTENDCURR", extendedmap[name])
461 d.setVar("BBEXTENDVARIANT", variantmap[name])
462 else:
463 d.setVar("PN", "%s-%s" % (pn, name))
464 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)
465
466 safe_d.setVar("BBCLASSEXTEND", extended)
467 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
468
469 for variant, variant_d in datastores.iteritems():
470 if variant:
471 try:
472 if not onlyfinalise or variant in onlyfinalise:
473 finalize(fn, variant_d, variant)
474 except bb.parse.SkipRecipe as e:
475 variant_d.setVar("__SKIPPED", e.args[0])
476
477 if len(datastores) > 1:
478 variants = filter(None, datastores.iterkeys())
479 safe_d.setVar("__VARIANTS", " ".join(variants))
480
481 datastores[""] = d
482 return datastores
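
A small standalone sketch of the BBCLASSEXTEND name handling performed in multi_finalize() above (the extension names are only examples; this mirrors the ext.split(':', 2) loop rather than driving BitBake itself):

def classextend_maps(bbclassextend):
    # "multilib:lib32" inherits multilib.bbclass with BBEXTENDVARIANT "lib32";
    # a plain "native" inherits native.bbclass and renames PN to "<pn>-native"
    extendedmap = {}
    variantmap = {}
    for ext in bbclassextend.split():
        eext = ext.split(':', 2)
        if len(eext) > 1:
            extendedmap[ext] = eext[0]
            variantmap[ext] = eext[1]
        else:
            extendedmap[ext] = ext
    return extendedmap, variantmap

extendedmap, variantmap = classextend_maps("native multilib:lib32")
print(extendedmap)  # {'native': 'native', 'multilib:lib32': 'multilib'}
print(variantmap)   # {'multilib:lib32': 'lib32'}
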
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
new file mode 100644
index 0000000000..9633340d1b
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -0,0 +1,261 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling .bb files
6
7 Reads a .bb file and obtains its metadata
8
9"""
10
11
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14#
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28from __future__ import absolute_import
29import re, bb, os
30import logging
31import bb.build, bb.utils
32from bb import data
33
34from . import ConfHandler
35from .. import resolve_file, ast, logger
36from .ConfHandler import include, init
37
38# For compatibility
39bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
40
41__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
42__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
43__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
44__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
45__deltask_regexp__ = re.compile("deltask\s+(?P<func>\w+)")
46__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
47__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
48__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
49
50
51__infunc__ = ""
52__inpython__ = False
53__body__ = []
54__classname__ = ""
55
56cached_statements = {}
57
58# We need to indicate EOF to the feeder. This code is so messy that
59# factoring it out into a close_parse_file method is out of the question.
60# We will use IN_PYTHON_EOF as an indicator to just close the method.
61#
62# The two parts using it are tightly integrated anyway
63IN_PYTHON_EOF = -9999999999999
64
65
66
67def supports(fn, d):
68 """Return True if fn has a supported extension"""
69 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
70
71def inherit(files, fn, lineno, d):
72 __inherit_cache = d.getVar('__inherit_cache') or []
73 files = d.expand(files).split()
74 for file in files:
75 if not os.path.isabs(file) and not file.endswith(".bbclass"):
76 file = os.path.join('classes', '%s.bbclass' % file)
77
78 if not os.path.isabs(file):
79 dname = os.path.dirname(fn)
80 bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
81 abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
82 for af in attempts:
83 if af != abs_fn:
84 bb.parse.mark_dependency(d, af)
85 if abs_fn:
86 file = abs_fn
87
88 if not file in __inherit_cache:
89 logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
90 __inherit_cache.append( file )
91 d.setVar('__inherit_cache', __inherit_cache)
92 include(fn, file, lineno, d, "inherit")
93 __inherit_cache = d.getVar('__inherit_cache') or []
94
95def get_statements(filename, absolute_filename, base_name):
96 global cached_statements
97
98 try:
99 return cached_statements[absolute_filename]
100 except KeyError:
101 file = open(absolute_filename, 'r')
102 statements = ast.StatementGroup()
103
104 lineno = 0
105 while True:
106 lineno = lineno + 1
107 s = file.readline()
108 if not s: break
109 s = s.rstrip()
110 feeder(lineno, s, filename, base_name, statements)
111 file.close()
112 if __inpython__:
113 # add a blank line to close out any python definition
114 feeder(IN_PYTHON_EOF, "", filename, base_name, statements)
115
116 if filename.endswith(".bbclass") or filename.endswith(".inc"):
117 cached_statements[absolute_filename] = statements
118 return statements
119
120def handle(fn, d, include):
121 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
122 __body__ = []
123 __infunc__ = ""
124 __classname__ = ""
125 __residue__ = []
126
127 base_name = os.path.basename(fn)
128 (root, ext) = os.path.splitext(base_name)
129 init(d)
130
131 if ext == ".bbclass":
132 __classname__ = root
133 __inherit_cache = d.getVar('__inherit_cache') or []
134 if not fn in __inherit_cache:
135 __inherit_cache.append(fn)
136 d.setVar('__inherit_cache', __inherit_cache)
137
138 if include != 0:
139 oldfile = d.getVar('FILE')
140 else:
141 oldfile = None
142
143 abs_fn = resolve_file(fn, d)
144
145 if include:
146 bb.parse.mark_dependency(d, abs_fn)
147
148 # actual loading
149 statements = get_statements(fn, abs_fn, base_name)
150
151 # DONE WITH PARSING... time to evaluate
152 if ext != ".bbclass":
153 d.setVar('FILE', abs_fn)
154
155 try:
156 statements.eval(d)
157 except bb.parse.SkipRecipe:
158 bb.data.setVar("__SKIPPED", True, d)
159 if include == 0:
160 return { "" : d }
161
162 if ext != ".bbclass" and include == 0:
163 return ast.multi_finalize(fn, d)
164
165 if oldfile:
166 d.setVar("FILE", oldfile)
167
168 return d
169
170def feeder(lineno, s, fn, root, statements):
171 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
172 if __infunc__:
173 if s == '}':
174 __body__.append('')
175 ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
176 __infunc__ = ""
177 __body__ = []
178 else:
179 __body__.append(s)
180 return
181
182 if __inpython__:
183 m = __python_func_regexp__.match(s)
184 if m and lineno != IN_PYTHON_EOF:
185 __body__.append(s)
186 return
187 else:
188 ast.handlePythonMethod(statements, fn, lineno, __inpython__,
189 root, __body__)
190 __body__ = []
191 __inpython__ = False
192
193 if lineno == IN_PYTHON_EOF:
194 return
195
196 if s and s[0] == '#':
197 if len(__residue__) != 0 and __residue__[0][0] != "#":
198 bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
199
200 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
201 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
202
203 if s and s[-1] == '\\':
204 __residue__.append(s[:-1])
205 return
206
207 s = "".join(__residue__) + s
208 __residue__ = []
209
210 # Skip empty lines
211 if s == '':
212 return
213
214 # Skip comments
215 if s[0] == '#':
216 return
217
218 m = __func_start_regexp__.match(s)
219 if m:
220 __infunc__ = m.group("func") or "__anonymous"
221 ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
222 return
223
224 m = __def_regexp__.match(s)
225 if m:
226 __body__.append(s)
227 __inpython__ = m.group(1)
228
229 return
230
231 m = __export_func_regexp__.match(s)
232 if m:
233 ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
234 return
235
236 m = __addtask_regexp__.match(s)
237 if m:
238 ast.handleAddTask(statements, fn, lineno, m)
239 return
240
241 m = __deltask_regexp__.match(s)
242 if m:
243 ast.handleDelTask(statements, fn, lineno, m)
244 return
245
246 m = __addhandler_regexp__.match(s)
247 if m:
248 ast.handleBBHandlers(statements, fn, lineno, m)
249 return
250
251 m = __inherit_regexp__.match(s)
252 if m:
253 ast.handleInherit(statements, fn, lineno, m)
254 return
255
256 return ConfHandler.feeder(lineno, s, fn, statements)
257
258# Add us to the handlers list
259from .. import handlers
260handlers.append({'supports': supports, 'handle': handle, 'init': init})
261del handlers
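
To make the addtask grammar above concrete, here is the same pattern applied to a sample line (the task names are arbitrary; note that a captured group may keep trailing whitespace, and AddTaskNode simply passes the raw groups on to bb.build.addtask):

import re

# the same pattern as __addtask_regexp__ above, written as a raw string
addtask_re = re.compile(
    r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|"
    r"(after\s*(?P<after>((.*(?=before))|(.*)))))*")

m = addtask_re.match("addtask do_deploy after do_install before do_build")
print(m.group("func"))     # do_deploy
print(m.group("after"))    # 'do_install ' (note the trailing space)
print(m.group("before"))   # do_build
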
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
new file mode 100644
index 0000000000..978ebe4608
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -0,0 +1,189 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling configuration data files
6
7 Reads a .conf file and obtains its metadata
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26
27import re, os
28import logging
29import bb.utils
30from bb.parse import ParseError, resolve_file, ast, logger
31
32__config_regexp__ = re.compile( r"""
33 ^
34 (?P<exp>export\s*)?
35 (?P<var>[a-zA-Z0-9\-~_+.${}/]+?)
36 (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?
37
38 \s* (
39 (?P<colon>:=) |
40 (?P<lazyques>\?\?=) |
41 (?P<ques>\?=) |
42 (?P<append>\+=) |
43 (?P<prepend>=\+) |
44 (?P<predot>=\.) |
45 (?P<postdot>\.=) |
46 =
47 ) \s*
48
49 (?!'[^']*'[^']*'$)
50 (?!\"[^\"]*\"[^\"]*\"$)
51 (?P<apo>['\"])
52 (?P<value>.*)
53 (?P=apo)
54 $
55 """, re.X)
56__include_regexp__ = re.compile( r"include\s+(.+)" )
57__require_regexp__ = re.compile( r"require\s+(.+)" )
58__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
59
60def init(data):
61 topdir = data.getVar('TOPDIR')
62 if not topdir:
63 data.setVar('TOPDIR', os.getcwd())
64
65
66def supports(fn, d):
67 return fn[-5:] == ".conf"
68
69def include(oldfn, fn, lineno, data, error_out):
70 """
71 error_out: A string indicating the verb (e.g. "include", "inherit") to be
72 used in a ParseError that will be raised if the file to be included could
73 not be included. Specify False to avoid raising an error in this case.
74 """
75 if oldfn == fn: # prevent infinite recursion
76 return None
77
78 import bb
79 fn = data.expand(fn)
80 oldfn = data.expand(oldfn)
81
82 if not os.path.isabs(fn):
83 dname = os.path.dirname(oldfn)
84 bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
85 abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
86 if abs_fn and bb.parse.check_dependency(data, abs_fn):
87 bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
88 for af in attempts:
89 bb.parse.mark_dependency(data, af)
90 if abs_fn:
91 fn = abs_fn
92 elif bb.parse.check_dependency(data, fn):
93 bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
94
95 from bb.parse import handle
96 try:
97 ret = handle(fn, data, True)
98 except (IOError, OSError):
99 if error_out:
100 raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
101 logger.debug(2, "CONF file '%s' not found", fn)
102 bb.parse.mark_dependency(data, fn)
103
104# We have an issue where a UI might want to enforce particular settings such as
105# an empty DISTRO variable. If configuration files do something like assigning
106# a weak default, it turns out to be very difficult to filter out these changes,
107# particularly when the weak default might appear halfway through parsing a chain
108# of configuration files. We therefore let the UIs hook into configuration file
109# parsing. This turns out to be a hard problem to solve any other way.
110confFilters = []
111
112def handle(fn, data, include):
113 init(data)
114
115 if include == 0:
116 oldfile = None
117 else:
118 oldfile = data.getVar('FILE')
119
120 abs_fn = resolve_file(fn, data)
121 f = open(abs_fn, 'r')
122
123 if include:
124 bb.parse.mark_dependency(data, abs_fn)
125
126 statements = ast.StatementGroup()
127 lineno = 0
128 while True:
129 lineno = lineno + 1
130 s = f.readline()
131 if not s:
132 break
133 w = s.strip()
134 # skip empty lines
135 if not w:
136 continue
137 s = s.rstrip()
138 while s[-1] == '\\':
139 s2 = f.readline().strip()
140 lineno = lineno + 1
141 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
142 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
143 s = s[:-1] + s2
144 # skip comments
145 if s[0] == '#':
146 continue
147 feeder(lineno, s, abs_fn, statements)
148
149 # DONE WITH PARSING... time to evaluate
150 data.setVar('FILE', abs_fn)
151 statements.eval(data)
152 if oldfile:
153 data.setVar('FILE', oldfile)
154
155 f.close()
156
157 for f in confFilters:
158 f(fn, data)
159
160 return data
161
162def feeder(lineno, s, fn, statements):
163 m = __config_regexp__.match(s)
164 if m:
165 groupd = m.groupdict()
166 ast.handleData(statements, fn, lineno, groupd)
167 return
168
169 m = __include_regexp__.match(s)
170 if m:
171 ast.handleInclude(statements, fn, lineno, m, False)
172 return
173
174 m = __require_regexp__.match(s)
175 if m:
176 ast.handleInclude(statements, fn, lineno, m, True)
177 return
178
179 m = __export_regexp__.match(s)
180 if m:
181 ast.handleExport(statements, fn, lineno, m)
182 return
183
184    raise ParseError("unparsed line: '%s'" % s, fn, lineno)
185
186# Add us to the handlers list
187from bb.parse import handlers
188handlers.append({'supports': supports, 'handle': handle, 'init': init})
189del handlers
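
A minimal sketch of the confFilters hook described in the comment above, assuming a BitBake environment where bb.parse is importable (the filter function and the DISTRO example are illustrative only):

from bb.parse import ConfHandler

def force_empty_distro(fn, data):
    # called by ConfHandler.handle() after each .conf file is evaluated;
    # a UI could use this to pin DISTRO regardless of weak defaults
    data.setVar("DISTRO", "")

ConfHandler.confFilters.append(force_empty_distro)
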
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py
new file mode 100644
index 0000000000..3e658d0de9
--- /dev/null
+++ b/bitbake/lib/bb/parse/parse_py/__init__.py
@@ -0,0 +1,33 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake Parsers
6
7File parsers for the BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26#
27# Based on functions from the base bb module, Copyright 2003 Holger Schurig
28
29from __future__ import absolute_import
30from . import ConfHandler
31from . import BBHandler
32
33__version__ = '1.0'
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
new file mode 100644
index 0000000000..5795bc835b
--- /dev/null
+++ b/bitbake/lib/bb/persist_data.py
@@ -0,0 +1,217 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access it at some future date. Currently acts as a convenience wrapper around
5sqlite, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import collections
25import logging
26import os.path
27import sys
28import warnings
29from bb.compat import total_ordering
30from collections import Mapping
31
32try:
33 import sqlite3
34except ImportError:
35 from pysqlite2 import dbapi2 as sqlite3
36
37sqlversion = sqlite3.sqlite_version_info
38if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
39 raise Exception("sqlite3 version 3.3.0 or later is required.")
40
41
42logger = logging.getLogger("BitBake.PersistData")
43if hasattr(sqlite3, 'enable_shared_cache'):
44 try:
45 sqlite3.enable_shared_cache(True)
46 except sqlite3.OperationalError:
47 pass
48
49
50@total_ordering
51class SQLTable(collections.MutableMapping):
52 """Object representing a table/domain in the database"""
53 def __init__(self, cachefile, table):
54 self.cachefile = cachefile
55 self.table = table
56 self.cursor = connect(self.cachefile)
57
58 self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
59 % table)
60
61 def _execute(self, *query):
62 """Execute a query, waiting to acquire a lock if necessary"""
63 count = 0
64 while True:
65 try:
66 return self.cursor.execute(*query)
67 except sqlite3.OperationalError as exc:
68 if 'database is locked' in str(exc) and count < 500:
69 count = count + 1
70 self.cursor.close()
71 self.cursor = connect(self.cachefile)
72 continue
73 raise
74
75 def __enter__(self):
76 self.cursor.__enter__()
77 return self
78
79 def __exit__(self, *excinfo):
80 self.cursor.__exit__(*excinfo)
81
82 def __getitem__(self, key):
83 data = self._execute("SELECT * from %s where key=?;" %
84 self.table, [key])
85 for row in data:
86 return row[1]
87 raise KeyError(key)
88
89 def __delitem__(self, key):
90 if key not in self:
91 raise KeyError(key)
92 self._execute("DELETE from %s where key=?;" % self.table, [key])
93
94 def __setitem__(self, key, value):
95 if not isinstance(key, basestring):
96 raise TypeError('Only string keys are supported')
97 elif not isinstance(value, basestring):
98 raise TypeError('Only string values are supported')
99
100 data = self._execute("SELECT * from %s where key=?;" %
101 self.table, [key])
102 exists = len(list(data))
103 if exists:
104 self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
105 [value, key])
106 else:
107 self._execute("INSERT into %s(key, value) values (?, ?);" %
108 self.table, [key, value])
109
110 def __contains__(self, key):
111 return key in set(self)
112
113 def __len__(self):
114 data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
115 for row in data:
116 return row[0]
117
118 def __iter__(self):
119 data = self._execute("SELECT key FROM %s;" % self.table)
120 return (row[0] for row in data)
121
122 def __lt__(self, other):
123 if not isinstance(other, Mapping):
124 raise NotImplemented
125
126 return len(self) < len(other)
127
128 def get_by_pattern(self, pattern):
129 data = self._execute("SELECT * FROM %s WHERE key LIKE ?;" %
130 self.table, [pattern])
131 return [row[1] for row in data]
132
133 def values(self):
134 return list(self.itervalues())
135
136 def itervalues(self):
137 data = self._execute("SELECT value FROM %s;" % self.table)
138 return (row[0] for row in data)
139
140 def items(self):
141 return list(self.iteritems())
142
143 def iteritems(self):
144 return self._execute("SELECT * FROM %s;" % self.table)
145
146 def clear(self):
147 self._execute("DELETE FROM %s;" % self.table)
148
149 def has_key(self, key):
150 return key in self
151
152
153class PersistData(object):
154 """Deprecated representation of the bitbake persistent data store"""
155 def __init__(self, d):
156 warnings.warn("Use of PersistData is deprecated. Please use "
157 "persist(domain, d) instead.",
158 category=DeprecationWarning,
159 stacklevel=2)
160
161 self.data = persist(d)
162 logger.debug(1, "Using '%s' as the persistent data cache",
163 self.data.filename)
164
165 def addDomain(self, domain):
166 """
167 Add a domain (pending deprecation)
168 """
169 return self.data[domain]
170
171 def delDomain(self, domain):
172 """
173 Removes a domain and all the data it contains
174 """
175 del self.data[domain]
176
177 def getKeyValues(self, domain):
178 """
179 Return a list of key + value pairs for a domain
180 """
181 return self.data[domain].items()
182
183 def getValue(self, domain, key):
184 """
185 Return the value of a key for a domain
186 """
187 return self.data[domain][key]
188
189 def setValue(self, domain, key, value):
190 """
191 Sets the value of a key for a domain
192 """
193 self.data[domain][key] = value
194
195 def delValue(self, domain, key):
196 """
197 Deletes a key/value pair
198 """
199 del self.data[domain][key]
200
201def connect(database):
202 connection = sqlite3.connect(database, timeout=5, isolation_level=None)
203 connection.execute("pragma synchronous = off;")
204 return connection
205
206def persist(domain, d):
207 """Convenience factory for SQLTable objects based upon metadata"""
208 import bb.utils
209 cachedir = (d.getVar("PERSISTENT_DIR", True) or
210 d.getVar("CACHE", True))
211 if not cachedir:
212 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
213 sys.exit(1)
214
215 bb.utils.mkdirhier(cachedir)
216 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
217 return SQLTable(cachefile, domain)
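
A short example of using the table object directly, assuming the bitbake lib directory is on sys.path (the domain name, key and value are arbitrary); persist() does the same thing but derives the cache file from PERSISTENT_DIR or CACHE in the datastore:

import os
import tempfile
from bb.persist_data import SQLTable

cachefile = os.path.join(tempfile.mkdtemp(), "bb_persist_data.sqlite3")
revs = SQLTable(cachefile, "BB_URI_HEADREVS")

revs["git://example.com/repo.git"] = "deadbeef"       # string keys/values only
print("git://example.com/repo.git" in revs)           # True
print(revs.get_by_pattern("git://example.com/%"))     # ['deadbeef']
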
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
new file mode 100644
index 0000000000..8b1aea9a10
--- /dev/null
+++ b/bitbake/lib/bb/process.py
@@ -0,0 +1,133 @@
1import logging
2import signal
3import subprocess
4import errno
5import select
6
7logger = logging.getLogger('BitBake.Process')
8
9def subprocess_setup():
10 # Python installs a SIGPIPE handler by default. This is usually not what
11 # non-Python subprocesses expect.
12 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
13
14class CmdError(RuntimeError):
15 def __init__(self, command, msg=None):
16 self.command = command
17 self.msg = msg
18
19 def __str__(self):
20 if not isinstance(self.command, basestring):
21 cmd = subprocess.list2cmdline(self.command)
22 else:
23 cmd = self.command
24
25 msg = "Execution of '%s' failed" % cmd
26 if self.msg:
27 msg += ': %s' % self.msg
28 return msg
29
30class NotFoundError(CmdError):
31 def __str__(self):
32 return CmdError.__str__(self) + ": command not found"
33
34class ExecutionError(CmdError):
35 def __init__(self, command, exitcode, stdout = None, stderr = None):
36 CmdError.__init__(self, command)
37 self.exitcode = exitcode
38 self.stdout = stdout
39 self.stderr = stderr
40
41 def __str__(self):
42 message = ""
43 if self.stderr:
44 message += self.stderr
45 if self.stdout:
46 message += self.stdout
47 if message:
48 message = ":\n" + message
49 return (CmdError.__str__(self) +
50 " with exit code %s" % self.exitcode + message)
51
52class Popen(subprocess.Popen):
53 defaults = {
54 "close_fds": True,
55 "preexec_fn": subprocess_setup,
56 "stdout": subprocess.PIPE,
57 "stderr": subprocess.STDOUT,
58 "stdin": subprocess.PIPE,
59 "shell": False,
60 }
61
62 def __init__(self, *args, **kwargs):
63 options = dict(self.defaults)
64 options.update(kwargs)
65 subprocess.Popen.__init__(self, *args, **options)
66
67def _logged_communicate(pipe, log, input):
68 if pipe.stdin:
69 if input is not None:
70 pipe.stdin.write(input)
71 pipe.stdin.close()
72
73 outdata, errdata = [], []
74 rin = []
75
76 if pipe.stdout is not None:
77 bb.utils.nonblockingfd(pipe.stdout.fileno())
78 rin.append(pipe.stdout)
79 if pipe.stderr is not None:
80 bb.utils.nonblockingfd(pipe.stderr.fileno())
81 rin.append(pipe.stderr)
82
83 try:
84 while pipe.poll() is None:
85 rlist = rin
86 try:
87 r,w,e = select.select (rlist, [], [], 1)
88 except OSError as e:
89 if e.errno != errno.EINTR:
90 raise
91
92 if pipe.stdout in r:
93 data = pipe.stdout.read()
94 if data is not None:
95 outdata.append(data)
96 log.write(data)
97
98 if pipe.stderr in r:
99 data = pipe.stderr.read()
100 if data is not None:
101 errdata.append(data)
102 log.write(data)
103 finally:
104 log.flush()
105 if pipe.stdout is not None:
106 pipe.stdout.close()
107 if pipe.stderr is not None:
108 pipe.stderr.close()
109 return ''.join(outdata), ''.join(errdata)
110
111def run(cmd, input=None, log=None, **options):
112 """Convenience function to run a command and return its output, raising an
113 exception when the command fails"""
114
115 if isinstance(cmd, basestring) and not "shell" in options:
116 options["shell"] = True
117
118 try:
119 pipe = Popen(cmd, **options)
120 except OSError as exc:
121 if exc.errno == 2:
122 raise NotFoundError(cmd)
123 else:
124 raise CmdError(cmd, exc)
125
126 if log:
127 stdout, stderr = _logged_communicate(pipe, log, input)
128 else:
129 stdout, stderr = pipe.communicate(input)
130
131 if pipe.returncode != 0:
132 raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
133 return stdout, stderr
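
Typical use of run() from the module above, assuming the bitbake lib is importable (the command itself is arbitrary); note that with the default Popen settings stderr is folded into stdout:

from bb.process import run, ExecutionError, NotFoundError

try:
    stdout, stderr = run("uname -r")   # a plain string implies shell=True
    print(stdout.strip())
except NotFoundError:
    print("command not found")
except ExecutionError as exc:
    print("failed with exit code %s" % exc.exitcode)
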
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
new file mode 100644
index 0000000000..637e1fab96
--- /dev/null
+++ b/bitbake/lib/bb/providers.py
@@ -0,0 +1,381 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2003, 2004 Chris Larson
5# Copyright (C) 2003, 2004 Phil Blundell
6# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
7# Copyright (C) 2005 Holger Hans Peter Freyther
8# Copyright (C) 2005 ROAD GmbH
9# Copyright (C) 2006 Richard Purdie
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import re
25import logging
26from bb import data, utils
27from collections import defaultdict
28import bb
29
30logger = logging.getLogger("BitBake.Provider")
31
32class NoProvider(bb.BBHandledException):
33 """Exception raised when no provider of a build dependency can be found"""
34
35class NoRProvider(bb.BBHandledException):
36 """Exception raised when no provider of a runtime dependency can be found"""
37
38class MultipleRProvider(bb.BBHandledException):
39 """Exception raised when multiple providers of a runtime dependency can be found"""
40
41def findProviders(cfgData, dataCache, pkg_pn = None):
42 """
43 Convenience function to get latest and preferred providers in pkg_pn
44 """
45
46 if not pkg_pn:
47 pkg_pn = dataCache.pkg_pn
48
49 # Need to ensure data store is expanded
50 localdata = data.createCopy(cfgData)
51 bb.data.update_data(localdata)
52 bb.data.expandKeys(localdata)
53
54 preferred_versions = {}
55 latest_versions = {}
56
57 for pn in pkg_pn:
58 (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn)
59 preferred_versions[pn] = (pref_ver, pref_file)
60 latest_versions[pn] = (last_ver, last_file)
61
62 return (latest_versions, preferred_versions)
63
64
65def allProviders(dataCache):
66 """
67 Find all providers for each pn
68 """
69 all_providers = defaultdict(list)
70 for (fn, pn) in dataCache.pkg_fn.items():
71 ver = dataCache.pkg_pepvpr[fn]
72 all_providers[pn].append((ver, fn))
73 return all_providers
74
75
76def sortPriorities(pn, dataCache, pkg_pn = None):
77 """
78 Reorder pkg_pn by file priority and default preference
79 """
80
81 if not pkg_pn:
82 pkg_pn = dataCache.pkg_pn
83
84 files = pkg_pn[pn]
85 priorities = {}
86 for f in files:
87 priority = dataCache.bbfile_priority[f]
88 preference = dataCache.pkg_dp[f]
89 if priority not in priorities:
90 priorities[priority] = {}
91 if preference not in priorities[priority]:
92 priorities[priority][preference] = []
93 priorities[priority][preference].append(f)
94 tmp_pn = []
95 for pri in sorted(priorities):
96 tmp_pref = []
97 for pref in sorted(priorities[pri]):
98 tmp_pref.extend(priorities[pri][pref])
99 tmp_pn = [tmp_pref] + tmp_pn
100
101 return tmp_pn
102
103def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
104 """
105 Check if the version pe,pv,pr is the preferred one.
106 If a preferred version is defined and it ends with '%', then pv has to start with that version once the '%' is removed.
107 """
108 if (pr == preferred_r or preferred_r == None):
109 if (pe == preferred_e or preferred_e == None):
110 if preferred_v == pv:
111 return True
112 if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
113 return True
114 return False
115
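# Illustrative example (not part of the original file): with no epoch or
# revision constraint and preferred_v = "1.2%", the call
#   preferredVersionMatch(None, "1.2.3", "r0", None, "1.2%", None)
# returns True because "1.2.3" starts with "1.2", while pv = "1.3.0" would
# not match.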
116def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
117 """
118 Find the first provider in pkg_pn with a PREFERRED_VERSION set.
119 """
120
121 preferred_file = None
122 preferred_ver = None
123
124 localdata = data.createCopy(cfgData)
125 localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
126 bb.data.update_data(localdata)
127
128 preferred_v = localdata.getVar('PREFERRED_VERSION', True)
129 if preferred_v:
130 m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
131 if m:
132 if m.group(1):
133 preferred_e = m.group(1)[:-1]
134 else:
135 preferred_e = None
136 preferred_v = m.group(2)
137 if m.group(3):
138 preferred_r = m.group(3)[1:]
139 else:
140 preferred_r = None
141 else:
142 preferred_e = None
143 preferred_r = None
144
145 for file_set in pkg_pn:
146 for f in file_set:
147 pe, pv, pr = dataCache.pkg_pepvpr[f]
148 if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
149 preferred_file = f
150 preferred_ver = (pe, pv, pr)
151 break
152 if preferred_file:
153 break
154 if preferred_r:
155 pv_str = '%s-%s' % (preferred_v, preferred_r)
156 else:
157 pv_str = preferred_v
158 if not (preferred_e is None):
159 pv_str = '%s:%s' % (preferred_e, pv_str)
160 itemstr = ""
161 if item:
162 itemstr = " (for item %s)" % item
163 if preferred_file is None:
164 logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
165 available_vers = []
166 for file_set in pkg_pn:
167 for f in file_set:
168 pe, pv, pr = dataCache.pkg_pepvpr[f]
169 ver_str = pv
170 if pe:
171 ver_str = "%s:%s" % (pe, ver_str)
172 if not ver_str in available_vers:
173 available_vers.append(ver_str)
174 if available_vers:
175 available_vers.sort()
176 logger.info("versions of %s available: %s", pn, ' '.join(available_vers))
177 else:
178 logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
179
180 return (preferred_ver, preferred_file)
181
182
183def findLatestProvider(pn, cfgData, dataCache, file_set):
184 """
185 Return the highest version of the providers in file_set.
186 Take default preferences into account.
187 """
188 latest = None
189 latest_p = 0
190 latest_f = None
191 for file_name in file_set:
192 pe, pv, pr = dataCache.pkg_pepvpr[file_name]
193 dp = dataCache.pkg_dp[file_name]
194
195 if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
196 latest = (pe, pv, pr)
197 latest_f = file_name
198 latest_p = dp
199
200 return (latest, latest_f)
201
202
203def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
204 """
205 If there is a PREFERRED_VERSION, find the highest-priority bbfile
206 providing that version. If not, find the latest version provided by
207 a bbfile in the highest-priority set.
208 """
209
210 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
211 # Find the highest priority provider with a PREFERRED_VERSION set
212 (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
213 # Find the latest version of the highest priority provider
214 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
215
216 if preferred_file is None:
217 preferred_file = latest_f
218 preferred_ver = latest
219
220 return (latest, latest_f, preferred_ver, preferred_file)
221
222
223def _filterProviders(providers, item, cfgData, dataCache):
224 """
225 Take a list of providers and filter/reorder according to the
226 environment variables and previous build results
227 """
228 eligible = []
229 preferred_versions = {}
230 sortpkg_pn = {}
231
232 # The order of providers depends on the order of the files on the disk
233 # up to here. Sort pkg_pn to make dependency issues reproducible rather
234 # than effectively random.
235 providers.sort()
236
237 # Collate providers by PN
238 pkg_pn = {}
239 for p in providers:
240 pn = dataCache.pkg_fn[p]
241 if pn not in pkg_pn:
242 pkg_pn[pn] = []
243 pkg_pn[pn].append(p)
244
245 logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
246
247 # First add PREFERRED_VERSIONS
248 for pn in pkg_pn:
249 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
250 preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
251 if preferred_versions[pn][1]:
252 eligible.append(preferred_versions[pn][1])
253
254 # Now add latest versions
255 for pn in sortpkg_pn:
256 if pn in preferred_versions and preferred_versions[pn][1]:
257 continue
258 preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
259 eligible.append(preferred_versions[pn][1])
260
261 if len(eligible) == 0:
262 logger.error("no eligible providers for %s", item)
263 return 0
264
265 # If pn == item, give it a slight default preference
266 # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
267 for p in providers:
268 pn = dataCache.pkg_fn[p]
269 if pn != item:
270 continue
271 (newvers, fn) = preferred_versions[pn]
272 if not fn in eligible:
273 continue
274 eligible.remove(fn)
275 eligible = [fn] + eligible
276
277 return eligible
278
279
280def filterProviders(providers, item, cfgData, dataCache):
281 """
282 Take a list of providers and filter/reorder according to the
283 environment variables and previous build results
284 Takes a "normal" target item
285 """
286
287 eligible = _filterProviders(providers, item, cfgData, dataCache)
288
289 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True)
290 if prefervar:
291 dataCache.preferred[item] = prefervar
292
293 foundUnique = False
294 if item in dataCache.preferred:
295 for p in eligible:
296 pn = dataCache.pkg_fn[p]
297 if dataCache.preferred[item] == pn:
298 logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
299 eligible.remove(p)
300 eligible = [p] + eligible
301 foundUnique = True
302 break
303
304 logger.debug(1, "sorted providers for %s are: %s", item, eligible)
305
306 return eligible, foundUnique
307
308def filterProvidersRunTime(providers, item, cfgData, dataCache):
309 """
310 Take a list of providers and filter/reorder according to the
311 environment variables and previous build results
312 Takes a "runtime" target item
313 """
314
315 eligible = _filterProviders(providers, item, cfgData, dataCache)
316
317 # Should use dataCache.preferred here?
318 preferred = []
319 preferred_vars = []
320 pns = {}
321 for p in eligible:
322 pns[dataCache.pkg_fn[p]] = p
323 for p in eligible:
324 pn = dataCache.pkg_fn[p]
325 provides = dataCache.pn_provides[pn]
326 for provide in provides:
327 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
328 #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
329 if prefervar in pns and pns[prefervar] not in preferred:
330 var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
331 logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
332 preferred_vars.append(var)
333 pref = pns[prefervar]
334 eligible.remove(pref)
335 eligible = [pref] + eligible
336 preferred.append(pref)
337 break
338
339 numberPreferred = len(preferred)
340
341 if numberPreferred > 1:
342 logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)
343
344 logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
345
346 return eligible, numberPreferred
347
348regexp_cache = {}
349
350def getRuntimeProviders(dataCache, rdepend):
351 """
352 Return any providers of runtime dependency
353 """
354 rproviders = []
355
356 if rdepend in dataCache.rproviders:
357 rproviders += dataCache.rproviders[rdepend]
358
359 if rdepend in dataCache.packages:
360 rproviders += dataCache.packages[rdepend]
361
362 if rproviders:
363 return rproviders
364
365 # Only search dynamic packages if we can't find anything in other variables
366 for pattern in dataCache.packages_dynamic:
367 pattern = pattern.replace('+', "\+")
368 if pattern in regexp_cache:
369 regexp = regexp_cache[pattern]
370 else:
371 try:
372 regexp = re.compile(pattern)
373 except:
374 logger.error("Error parsing regular expression '%s'", pattern)
375 raise
376 regexp_cache[pattern] = regexp
377 if regexp.match(rdepend):
378 rproviders += dataCache.packages_dynamic[pattern]
379 logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)
380
381 return rproviders
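For reference, sortPriorities() above buckets provider files by BBFILE_PRIORITY and, within each bucket, orders them by DEFAULT_PREFERENCE, with the highest-priority bucket first. A self-contained sketch of that bucketing, using hypothetical stand-ins for the dataCache fields it reads:

    # Hypothetical stand-ins for dataCache.bbfile_priority and dataCache.pkg_dp.
    bbfile_priority = {"a_1.0.bb": 5, "a_2.0.bb": 5, "a_3.0.bb": 10}
    pkg_dp = {"a_1.0.bb": 0, "a_2.0.bb": -1, "a_3.0.bb": 0}

    def sort_priorities(files):
        buckets = {}
        for f in files:
            buckets.setdefault(bbfile_priority[f], {}).setdefault(pkg_dp[f], []).append(f)
        result = []
        for pri in sorted(buckets):
            flat = []
            for pref in sorted(buckets[pri]):
                flat.extend(buckets[pri][pref])
            result = [flat] + result  # highest-priority bucket ends up first
        return result

    # Prints [['a_3.0.bb'], ['a_2.0.bb', 'a_1.0.bb']]
    print(sort_priorities(["a_1.0.bb", "a_2.0.bb", "a_3.0.bb"]))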
diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/__init__.py
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
new file mode 100644
index 0000000000..b748e4a4f2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/builtin.py
@@ -0,0 +1,710 @@
1# builtin.py - builtins and utilities definitions for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Builtin and internal utilities implementations.
9
10- Beware not to use the Python interpreter environment as if it were the shell
11environment. For instance, a command's working directory must be explicitly handled
12through env['PWD'] instead of relying on the Python working directory.
13"""
14import errno
15import optparse
16import os
17import re
18import subprocess
19import sys
20import time
21
22def has_subprocess_bug():
23 return getattr(subprocess, 'list2cmdline') and \
24 ( subprocess.list2cmdline(['']) == '' or \
25 subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
26
27# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
28# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
29# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
30# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
31if has_subprocess_bug():
32 import subprocess_fix
33 subprocess.list2cmdline = subprocess_fix.list2cmdline
34
35from sherrors import *
36
37class NonExitingParser(optparse.OptionParser):
38 """OptionParser default behaviour upon error is to print the error message and
39 exit. Raise a utility error instead.
40 """
41 def error(self, msg):
42 raise UtilityError(msg)
43
44#-------------------------------------------------------------------------------
45# set special builtin
46#-------------------------------------------------------------------------------
47OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
48OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
49 help='The shell shall disable pathname expansion.')
50OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
51 help="""When this option is on, if a simple command fails for any of the \
52 reasons listed in Consequences of Shell Errors or returns an exit status \
53 value >0, and is not part of the compound list following a while, until, \
54 or if keyword, and is not a part of an AND or OR list, and is not a \
55 pipeline preceded by the ! reserved word, then the shell shall immediately \
56 exit.""")
57OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
58 help="""The shell shall write to standard error a trace for each command \
59 after it expands the command and before it executes it. It is unspecified \
60 whether the command that turns tracing off is traced.""")
61
62def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
63 if 'debug-utility' in debugflags:
64 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
65
66 option, args = OPT_SET.parse_args(args)
67 env = interp.get_env()
68
69 if option.has_f:
70 env.set_opt('-f')
71 if option.has_e:
72 env.set_opt('-e')
73 if option.has_x:
74 env.set_opt('-x')
75 return 0
76
77#-------------------------------------------------------------------------------
78# shift special builtin
79#-------------------------------------------------------------------------------
80def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
81 if 'debug-utility' in debugflags:
82 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
83
84 params = interp.get_env().get_positional_args()
85 if args:
86 try:
87 n = int(args[0])
88 if n > len(params):
89 raise ValueError()
90 except ValueError:
91 return 1
92 else:
93 n = 1
94
95 params[:n] = []
96 interp.get_env().set_positional_args(params)
97 return 0
98
99#-------------------------------------------------------------------------------
100# export special builtin
101#-------------------------------------------------------------------------------
102OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
103OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
104
105def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
106 if 'debug-utility' in debugflags:
107 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
108
109 option, args = OPT_EXPORT.parse_args(args)
110 if option.has_p:
111 raise NotImplementedError()
112
113 for arg in args:
114 try:
115 name, value = arg.split('=', 1)
116 except ValueError:
117 name, value = arg, None
118 env = interp.get_env().export(name, value)
119
120 return 0
121
122#-------------------------------------------------------------------------------
123# return special builtin
124#-------------------------------------------------------------------------------
125def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
126 if 'debug-utility' in debugflags:
127 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
128 res = 0
129 if args:
130 try:
131 res = int(args[0])
132 except ValueError:
133 res = 0
134 if not 0<=res<=255:
135 res = 0
136
137 # BUG: should be last executed command exit code
138 raise ReturnSignal(res)
139
140#-------------------------------------------------------------------------------
141# trap special builtin
142#-------------------------------------------------------------------------------
143def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
144 if 'debug-utility' in debugflags:
145 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
146 if len(args) < 2:
147 stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
148 return 2
149
150 action = args[0]
151 for sig in args[1:]:
152 try:
153 env.traps[sig] = action
154 except Exception as e:
155 stderr.write('trap: %s\n' % str(e))
156 return 0
157
158#-------------------------------------------------------------------------------
159# unset special builtin
160#-------------------------------------------------------------------------------
161OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
162OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
163OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
164
165def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
166 if 'debug-utility' in debugflags:
167 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
168
169 option, args = OPT_UNSET.parse_args(args)
170
171 status = 0
172 env = interp.get_env()
173 for arg in args:
174 try:
175 if option.has_f:
176 env.remove_function(arg)
177 else:
178 del env[arg]
179 except KeyError:
180 pass
181 except VarAssignmentError:
182 status = 1
183
184 return status
185
186#-------------------------------------------------------------------------------
187# wait special builtin
188#-------------------------------------------------------------------------------
189def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
190 if 'debug-utility' in debugflags:
191 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
192
193 return interp.wait([int(arg) for arg in args])
194
195#-------------------------------------------------------------------------------
196# cat utility
197#-------------------------------------------------------------------------------
198def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
199 if 'debug-utility' in debugflags:
200 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
201
202 if not args:
203 args = ['-']
204
205 status = 0
206 for arg in args:
207 if arg == '-':
208 data = stdin.read()
209 else:
210 path = os.path.join(env['PWD'], arg)
211 try:
212 f = file(path, 'rb')
213 try:
214 data = f.read()
215 finally:
216 f.close()
217 except IOError as e:
218 if e.errno != errno.ENOENT:
219 raise
220 status = 1
221 continue
222 stdout.write(data)
223 stdout.flush()
224 return status
225
226#-------------------------------------------------------------------------------
227# cd utility
228#-------------------------------------------------------------------------------
229OPT_CD = NonExitingParser("cd - change the working directory")
230
231def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
232 if 'debug-utility' in debugflags:
233 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
234
235 option, args = OPT_CD.parse_args(args)
236 env = interp.get_env()
237
238 directory = None
239 printdir = False
240 if not args:
241 home = env.get('HOME')
242 if home:
243 # Unspecified, do nothing
244 return 0
245 else:
246 directory = home
247 elif len(args)==1:
248 directory = args[0]
249 if directory=='-':
250 if 'OLDPWD' not in env:
251 raise UtilityError("OLDPWD not set")
252 printdir = True
253 directory = env['OLDPWD']
254 else:
255 raise UtilityError("too many arguments")
256
257 curpath = None
258 # Absolute directories will be handled correctly by the os.path.join call.
259 if not directory.startswith('.') and not directory.startswith('..'):
260 cdpaths = env.get('CDPATH', '.').split(';')
261 for cdpath in cdpaths:
262 p = os.path.join(cdpath, directory)
263 if os.path.isdir(p):
264 curpath = p
265 break
266
267 if curpath is None:
268 curpath = directory
269 curpath = os.path.join(env['PWD'], directory)
270
271 env['OLDPWD'] = env['PWD']
272 env['PWD'] = curpath
273 if printdir:
274 stdout.write('%s\n' % curpath)
275 return 0
276
277#-------------------------------------------------------------------------------
278# colon utility
279#-------------------------------------------------------------------------------
280def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
281 if 'debug-utility' in debugflags:
282 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
283 return 0
284
285#-------------------------------------------------------------------------------
286# echo utility
287#-------------------------------------------------------------------------------
288def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
289 if 'debug-utility' in debugflags:
290 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
291
292 # Echo only takes arguments, no options. Use printf if you need fancy stuff.
293 output = ' '.join(args) + '\n'
294 stdout.write(output)
295 stdout.flush()
296 return 0
297
298#-------------------------------------------------------------------------------
299# egrep utility
300#-------------------------------------------------------------------------------
301# egrep is usually a shell script.
302# Unfortunately, pysh does not support shell scripts *with arguments* right now,
303# so the redirection is implemented here, assuming grep is available.
304def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
305 if 'debug-utility' in debugflags:
306 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
307
308 return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
309 stderr, debugflags)
310
311#-------------------------------------------------------------------------------
312# env utility
313#-------------------------------------------------------------------------------
314def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
315 if 'debug-utility' in debugflags:
316 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
317
318 if args and args[0]=='-i':
319 raise NotImplementedError('env: -i option is not implemented')
320
321 i = 0
322 for arg in args:
323 if '=' not in arg:
324 break
325 # Update the current environment
326 name, value = arg.split('=', 1)
327 env[name] = value
328 i += 1
329
330 if args[i:]:
331 # Find then execute the specified interpreter
332 utility = env.find_in_path(args[i])
333 if not utility:
334 return 127
335 args[i:i+1] = utility
336 name = args[i]
337 args = args[i+1:]
338 try:
339 return run_command(name, args, interp, env, stdin, stdout, stderr,
340 debugflags)
341 except UtilityError:
342 stderr.write('env: failed to execute %s' % ' '.join([name]+args))
343 return 126
344 else:
345 for pair in env.get_variables().iteritems():
346 stdout.write('%s=%s\n' % pair)
347 return 0
348
349#-------------------------------------------------------------------------------
350# exit utility
351#-------------------------------------------------------------------------------
352def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
353 if 'debug-utility' in debugflags:
354 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
355
356 res = None
357 if args:
358 try:
359 res = int(args[0])
360 except ValueError:
361 res = None
362 if not 0<=res<=255:
363 res = None
364
365 if res is None:
366 # BUG: should be last executed command exit code
367 res = 0
368
369 raise ExitSignal(res)
370
371#-------------------------------------------------------------------------------
372# fgrep utility
373#-------------------------------------------------------------------------------
374# see egrep
375def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
376 if 'debug-utility' in debugflags:
377 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
378
379 return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
380 stderr, debugflags)
381
382#-------------------------------------------------------------------------------
383# gunzip utility
384#-------------------------------------------------------------------------------
385# see egrep
386def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
387 if 'debug-utility' in debugflags:
388 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
389
390 return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
391 stderr, debugflags)
392
393#-------------------------------------------------------------------------------
394# kill utility
395#-------------------------------------------------------------------------------
396def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
397 if 'debug-utility' in debugflags:
398 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
399
400 for arg in args:
401 pid = int(arg)
402 status = subprocess.call(['pskill', '/T', str(pid)],
403 shell=True,
404 stdout=subprocess.PIPE,
405 stderr=subprocess.PIPE)
406 # pskill is asynchronous, hence the stupid polling loop
407 while 1:
408 p = subprocess.Popen(['pslist', str(pid)],
409 shell=True,
410 stdout=subprocess.PIPE,
411 stderr=subprocess.STDOUT)
412 output = p.communicate()[0]
413 if ('process %d was not' % pid) in output:
414 break
415 time.sleep(1)
416 return status
417
418#-------------------------------------------------------------------------------
419# mkdir utility
420#-------------------------------------------------------------------------------
421OPT_MKDIR = NonExitingParser("mkdir - make directories.")
422OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
423
424def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
425 if 'debug-utility' in debugflags:
426 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
427
428 # TODO: implement umask
429 # TODO: implement proper utility error report
430 option, args = OPT_MKDIR.parse_args(args)
431 for arg in args:
432 path = os.path.join(env['PWD'], arg)
433 if option.has_p:
434 try:
435 os.makedirs(path)
436 except IOError as e:
437 if e.errno != errno.EEXIST:
438 raise
439 else:
440 os.mkdir(path)
441 return 0
442
443#-------------------------------------------------------------------------------
444# netstat utility
445#-------------------------------------------------------------------------------
446def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
447 # Do you really expect me to implement netstat ?
448 # This empty form is enough for Mercurial tests since it's
449 # supposed to generate nothing upon success. Faking this test
450 # is not a big deal either.
451 if 'debug-utility' in debugflags:
452 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
453 return 0
454
455#-------------------------------------------------------------------------------
456# pwd utility
457#-------------------------------------------------------------------------------
458OPT_PWD = NonExitingParser("pwd - return working directory name")
459OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
460 help="""If the PWD environment variable contains an absolute pathname of \
461 the current directory that does not contain the filenames dot or dot-dot, \
462 pwd shall write this pathname to standard output. Otherwise, the -L option \
463 shall behave as the -P option.""")
464OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False,
465 help="""The absolute pathname written shall not contain filenames that, in \
466 the context of the pathname, refer to files of type symbolic link.""")
467
468def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
469 if 'debug-utility' in debugflags:
470 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
471
472 option, args = OPT_PWD.parse_args(args)
473 stdout.write('%s\n' % env['PWD'])
474 return 0
475
476#-------------------------------------------------------------------------------
477# printf utility
478#-------------------------------------------------------------------------------
479RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
480
481def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
482 if 'debug-utility' in debugflags:
483 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
484
485 def replace(m):
486 assert m.group()
487 g = m.group()[1:]
488 if g.startswith('x'):
489 return chr(int(g[1:], 16))
490 if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
491 # Yay, an octal number
492 return chr(int(g, 8))
493 return {
494 'a': '\a',
495 'b': '\b',
496 'f': '\f',
497 'n': '\n',
498 'r': '\r',
499 't': '\t',
500 'v': '\v',
501 '\\': '\\',
502 }.get(g)
503
504 # Convert escape sequences
505 format = re.sub(RE_UNESCAPE, replace, args[0])
506 stdout.write(format % tuple(args[1:]))
507 return 0
508
509#-------------------------------------------------------------------------------
510# true utility
511#-------------------------------------------------------------------------------
512def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
513 if 'debug-utility' in debugflags:
514 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
515 return 0
516
517#-------------------------------------------------------------------------------
518# sed utility
519#-------------------------------------------------------------------------------
520RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
521
522# cygwin sed fails with some expressions when they do not end with a single space.
523# See unit tests for details. Interestingly, the same expressions work perfectly
524# in the cygwin shell.
525def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
526 if 'debug-utility' in debugflags:
527 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
528
529 # Scan pattern arguments and append a space if necessary
530 for i in xrange(len(args)):
531 if not RE_SED.search(args[i]):
532 continue
533 args[i] = args[i] + ' '
534
535 return run_command(name, args, interp, env, stdin, stdout,
536 stderr, debugflags)
537
538#-------------------------------------------------------------------------------
539# sleep utility
540#-------------------------------------------------------------------------------
541def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
542 if 'debug-utility' in debugflags:
543 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
544 time.sleep(int(args[0]))
545 return 0
546
547#-------------------------------------------------------------------------------
548# sort utility
549#-------------------------------------------------------------------------------
550OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
551
552def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
553
554 def sort(path):
555 if path == '-':
556 lines = stdin.readlines()
557 else:
558 try:
559 f = file(path)
560 try:
561 lines = f.readlines()
562 finally:
563 f.close()
564 except IOError as e:
565 stderr.write(str(e) + '\n')
566 return 1
567
568 if lines and lines[-1][-1]!='\n':
569 lines[-1] = lines[-1] + '\n'
570 return lines
571
572 if 'debug-utility' in debugflags:
573 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
574
575 option, args = OPT_SORT.parse_args(args)
576 alllines = []
577
578 if len(args)<=0:
579 args += ['-']
580
581 # Load all files lines
582 curdir = os.getcwd()
583 try:
584 os.chdir(env['PWD'])
585 for path in args:
586 alllines += sort(path)
587 finally:
588 os.chdir(curdir)
589
590 alllines.sort()
591 for line in alllines:
592 stdout.write(line)
593 return 0
594
595#-------------------------------------------------------------------------------
596# hg utility
597#-------------------------------------------------------------------------------
598
599hgcommands = [
600 'add',
601 'addremove',
602 'commit', 'ci',
603 'debugrename',
604 'debugwalk',
605 'falabala', # Dummy command used in a mercurial test
606 'incoming',
607 'locate',
608 'pull',
609 'push',
610 'qinit',
611 'remove', 'rm',
612 'rename', 'mv',
613 'revert',
614 'showconfig',
615 'status', 'st',
616 'strip',
617 ]
618
619def rewriteslashes(name, args):
620 # Several hg commands output file paths; rewrite the separators
621 if len(args) > 1 and name.lower().endswith('python') \
622 and args[0].endswith('hg'):
623 for cmd in hgcommands:
624 if cmd in args[1:]:
625 return True
626
627 # svn output contains many paths with OS specific separators.
628 # Normalize these to unix paths.
629 base = os.path.basename(name)
630 if base.startswith('svn'):
631 return True
632
633 return False
634
635def rewritehg(output):
636 if not output:
637 return output
638 # Rewrite os specific messages
639 output = output.replace(': The system cannot find the file specified',
640 ': No such file or directory')
641 output = re.sub(': Access is denied.*$', ': Permission denied', output)
642 output = output.replace(': No connection could be made because the target machine actively refused it',
643 ': Connection refused')
644 return output
645
646
647def run_command(name, args, interp, env, stdin, stdout,
648 stderr, debugflags):
649 # Execute the command
650 if 'debug-utility' in debugflags:
651 print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
652
653 hgbin = interp.options().hgbinary
654 ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
655 unixoutput = 'cygwin' in name or ishg
656
657 exec_env = env.get_variables()
658 try:
659 # BUG: comparing file descriptors is clearly not a reliable way to tell
660 # whether they point to the same underlying object. But within pysh's limited
661 # scope this is usually right; we do not expect complicated redirections
662 # beyond the usual 2>&1.
663 # Still, there is one case we cannot deal with: when stdout and stderr are
664 # redirected *by the pysh caller*. This is the reason for the
665 # --redirect pysh() option.
666 # Now, we want to know whether they are the same because we sometimes need to
667 # transform the command output, mostly removing CR-LF to ensure that the
668 # command output is unix-like. Cygwin utilities are a special case because
669 # they explicitly set their output streams to binary mode, so we have
670 # nothing to do. For all other commands, we have to guess whether they
671 # are sending text data, in which case the transformation must be done.
672 # Again, the NUL character test is unreliable but should be enough for
673 # the hg tests.
674 redirected = stdout.fileno()==stderr.fileno()
675 if not redirected:
676 p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
677 stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
678 else:
679 p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
680 stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
681 out, err = p.communicate()
682 except WindowsError as e:
683 raise UtilityError(str(e))
684
685 if not unixoutput:
686 def encode(s):
687 if '\0' in s:
688 return s
689 return s.replace('\r\n', '\n')
690 else:
691 encode = lambda s: s
692
693 if rewriteslashes(name, args):
694 encode1_ = encode
695 def encode(s):
696 s = encode1_(s)
697 s = s.replace('\\\\', '\\')
698 s = s.replace('\\', '/')
699 return s
700
701 if ishg:
702 encode2_ = encode
703 def encode(s):
704 return rewritehg(encode2_(s))
705
706 stdout.write(encode(out))
707 if not redirected:
708 stderr.write(encode(err))
709 return p.returncode
710
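Every builtin and utility in this file follows the same calling convention: a callable taking (name, args, interp, env, stdin, stdout, stderr, debugflags) and returning an integer exit status, which interp.py below wraps in a Utility object. A minimal sketch of a hypothetical extra utility written against that convention (not part of the commit):

    import os

    def utility_basename(name, args, interp, env, stdin, stdout, stderr, debugflags):
        # Write the last path component of the first argument, mirroring the
        # stdout/stderr and exit-status conventions of the utilities above.
        if not args:
            stderr.write('basename: missing operand\n')
            return 1
        stdout.write(os.path.basename(args[0]) + '\n')
        return 0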
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
new file mode 100644
index 0000000000..25d8c92ec4
--- /dev/null
+++ b/bitbake/lib/bb/pysh/interp.py
@@ -0,0 +1,1367 @@
1# interp.py - shell interpreter for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Implement the shell interpreter.
9
10Most references are made to "The Open Group Base Specifications Issue 6".
11<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
12"""
13# TODO: document the fact that input streams must implement fileno() so Popen will work correctly.
14# It requires non-stdin streams to be implemented as files. Still to be tested...
15# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
16# TODO: stop command execution upon error.
17# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
18# TODO: review subshell implementation
19# TODO: test environment cloning for non-special builtins
20# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
21# TODO: unit test for variable assignment
22# TODO: test error management wrt error type/utility type
23# TODO: test for binary output everywhere
24# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
25import base64
26import cPickle as pickle
27import errno
28import glob
29import os
30import re
31import subprocess
32import sys
33import tempfile
34
35try:
36 s = set()
37 del s
38except NameError:
39 from sets import Set as set
40
41import builtin
42from sherrors import *
43import pyshlex
44import pyshyacc
45
46def mappend(func, *args, **kargs):
47 """Like map but assume func returns a list. Returned lists are merged into
48 a single one.
49 """
50 return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
51
52class FileWrapper:
53 """File object wrapper to ease debugging.
54
55 Allow mode checking and implement file duplication through a simple
56 reference counting scheme. Not sure the latter is really useful since
57 only real file descriptors can be used.
58 """
59 def __init__(self, mode, file, close=True):
60 if mode not in ('r', 'w', 'a'):
61 raise IOError('invalid mode: %s' % mode)
62 self._mode = mode
63 self._close = close
64 if isinstance(file, FileWrapper):
65 if file._refcount[0] <= 0:
66 raise IOError(0, 'Error')
67 self._refcount = file._refcount
68 self._refcount[0] += 1
69 self._file = file._file
70 else:
71 self._refcount = [1]
72 self._file = file
73
74 def dup(self):
75 return FileWrapper(self._mode, self, self._close)
76
77 def fileno(self):
78 """fileno() should be only necessary for input streams."""
79 return self._file.fileno()
80
81 def read(self, size=-1):
82 if self._mode!='r':
83 raise IOError(0, 'Error')
84 return self._file.read(size)
85
86 def readlines(self, *args, **kwargs):
87 return self._file.readlines(*args, **kwargs)
88
89 def write(self, s):
90 if self._mode not in ('w', 'a'):
91 raise IOError(0, 'Error')
92 return self._file.write(s)
93
94 def flush(self):
95 self._file.flush()
96
97 def close(self):
98 if not self._refcount:
99 return
100 assert self._refcount[0] > 0
101
102 self._refcount[0] -= 1
103 if self._refcount[0] == 0:
104 self._mode = 'c'
105 if self._close:
106 self._file.close()
107 self._refcount = None
108
109 def mode(self):
110 return self._mode
111
112 def __getattr__(self, name):
113 if name == 'name':
114 self.name = getattr(self._file, name)
115 return self.name
116 else:
117 raise AttributeError(name)
118
119 def __del__(self):
120 self.close()
121
122
123def win32_open_devnull(mode):
124 return open('NUL', mode)
125
126
127class Redirections:
128 """Stores open files and their mapping to pseudo-sh file descriptor.
129 """
130 # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
131 # not make 1 redirect to 4
132 def __init__(self, stdin=None, stdout=None, stderr=None):
133 self._descriptors = {}
134 if stdin is not None:
135 self._add_descriptor(0, stdin)
136 if stdout is not None:
137 self._add_descriptor(1, stdout)
138 if stderr is not None:
139 self._add_descriptor(2, stderr)
140
141 def add_here_document(self, interp, name, content, io_number=None):
142 if io_number is None:
143 io_number = 0
144
145 if name==pyshlex.unquote_wordtree(name):
146 content = interp.expand_here_document(('TOKEN', content))
147
148 # Write document content in a temporary file
149 tmp = tempfile.TemporaryFile()
150 try:
151 tmp.write(content)
152 tmp.flush()
153 tmp.seek(0)
154 self._add_descriptor(io_number, FileWrapper('r', tmp))
155 except:
156 tmp.close()
157 raise
158
159 def add(self, interp, op, filename, io_number=None):
160 if op not in ('<', '>', '>|', '>>', '>&'):
161 # TODO: add descriptor duplication and here_documents
162 raise RedirectionError('Unsupported redirection operator "%s"' % op)
163
164 if io_number is not None:
165 io_number = int(io_number)
166
167 if (op == '>&' and filename.isdigit()) or filename=='-':
168 # No expansion for file descriptors, quote them if you want a filename
169 fullname = filename
170 else:
171 if filename.startswith('/'):
172 # TODO: win32 kludge
173 if filename=='/dev/null':
174 fullname = 'NUL'
175 else:
176 # TODO: handle absolute pathnames, they are unlikely to exist on the
177 # current platform (win32 for instance).
178 raise NotImplementedError()
179 else:
180 fullname = interp.expand_redirection(('TOKEN', filename))
181 if not fullname:
182 raise RedirectionError('%s: ambiguous redirect' % filename)
183 # Build absolute path based on PWD
184 fullname = os.path.join(interp.get_env()['PWD'], fullname)
185
186 if op=='<':
187 return self._add_input_redirection(interp, fullname, io_number)
188 elif op in ('>', '>|'):
189 clobber = ('>|'==op)
190 return self._add_output_redirection(interp, fullname, io_number, clobber)
191 elif op=='>>':
192 return self._add_output_appending(interp, fullname, io_number)
193 elif op=='>&':
194 return self._dup_output_descriptor(fullname, io_number)
195
196 def close(self):
197 if self._descriptors is not None:
198 for desc in self._descriptors.itervalues():
199 desc.flush()
200 desc.close()
201 self._descriptors = None
202
203 def stdin(self):
204 return self._descriptors[0]
205
206 def stdout(self):
207 return self._descriptors[1]
208
209 def stderr(self):
210 return self._descriptors[2]
211
212 def clone(self):
213 clone = Redirections()
214 for desc, fileobj in self._descriptors.iteritems():
215 clone._descriptors[desc] = fileobj.dup()
216 return clone
217
218 def _add_output_redirection(self, interp, filename, io_number, clobber):
219 if io_number is None:
220 # io_number default to standard output
221 io_number = 1
222
223 if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
224 # File already exist in no-clobber mode, bail out
225 raise RedirectionError('File "%s" already exists' % filename)
226
227 # Open and register
228 self._add_file_descriptor(io_number, filename, 'w')
229
230 def _add_output_appending(self, interp, filename, io_number):
231 if io_number is None:
232 io_number = 1
233 self._add_file_descriptor(io_number, filename, 'a')
234
235 def _add_input_redirection(self, interp, filename, io_number):
236 if io_number is None:
237 io_number = 0
238 self._add_file_descriptor(io_number, filename, 'r')
239
240 def _add_file_descriptor(self, io_number, filename, mode):
241 try:
242 if filename.startswith('/'):
243 if filename=='/dev/null':
244 f = win32_open_devnull(mode+'b')
245 else:
246 # TODO: handle absolute pathnames, they are unlikely to exist on the
247 # current platform (win32 for instance).
248 raise NotImplementedError('cannot open absolute path %s' % repr(filename))
249 else:
250 f = file(filename, mode+'b')
251 except IOError as e:
252 raise RedirectionError(str(e))
253
254 wrapper = None
255 try:
256 wrapper = FileWrapper(mode, f)
257 f = None
258 self._add_descriptor(io_number, wrapper)
259 except:
260 if f: f.close()
261 if wrapper: wrapper.close()
262 raise
263
264 def _dup_output_descriptor(self, source_fd, dest_fd):
265 if source_fd is None:
266 source_fd = 1
267 self._dup_file_descriptor(source_fd, dest_fd, 'w')
268
269 def _dup_file_descriptor(self, source_fd, dest_fd, mode):
270 source_fd = int(source_fd)
271 if source_fd not in self._descriptors:
272 raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
273 source = self._descriptors[source_fd]
274
275 if source.mode()!=mode:
276 raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
277
278 if dest_fd=='-':
279 # Close the source descriptor
280 del self._descriptors[source_fd]
281 source.close()
282 else:
283 dest_fd = int(dest_fd)
284 if dest_fd not in self._descriptors:
285 raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
286
287 dest = self._descriptors[dest_fd]
288 if dest.mode()!=mode:
289 raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
290
291 self._descriptors[dest_fd] = source.dup()
292 dest.close()
293
294 def _add_descriptor(self, io_number, file):
295 io_number = int(io_number)
296
297 if io_number in self._descriptors:
298 # Close the current descriptor
299 d = self._descriptors[io_number]
300 del self._descriptors[io_number]
301 d.close()
302
303 self._descriptors[io_number] = file
304
305 def __str__(self):
306 names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
307 in self._descriptors.iteritems()]
308 names = ','.join(names)
309 return 'Redirections(%s)' % names
310
311 def __del__(self):
312 self.close()
313
314def cygwin_to_windows_path(path):
315 """Turn /cygdrive/c/foo into c:/foo, or return path if it
316 is not a cygwin path.
317 """
318 if not path.startswith('/cygdrive/'):
319 return path
320 path = path[len('/cygdrive/'):]
321 path = path[:1] + ':' + path[1:]
322 return path
323
324def win32_to_unix_path(path):
325 if path is not None:
326 path = path.replace('\\', '/')
327 return path
328
329_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
330_SHEBANG_CMDS = {
331 '/usr/bin/env': 'env',
332 '/bin/sh': 'pysh',
333 'python': 'python',
334}
335
336def resolve_shebang(path, ignoreshell=False):
337 """Return a list of arguments as shebang interpreter call or an empty list
338 if path does not refer to an executable script.
339 See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
340
341 ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
342 """
343 try:
344 f = file(path)
345 try:
346 # At most 80 characters in the first line
347 header = f.read(80).splitlines()[0]
348 finally:
349 f.close()
350
351 m = _RE_SHEBANG.search(header)
352 if not m:
353 return []
354 cmd, arg = m.group(1,2)
355 if os.path.isfile(cmd):
356 # Keep this one, the hg script for instance contains a weird windows
357 # shebang referencing the current python install.
358 cmdfile = os.path.basename(cmd).lower()
359 if cmdfile == 'python.exe':
360 cmd = 'python'
361 pass
362 elif cmd not in _SHEBANG_CMDS:
363 raise CommandNotFound('Unknown interpreter "%s" referenced in '\
364 'shebang' % header)
365 cmd = _SHEBANG_CMDS.get(cmd)
366 if cmd is None or (ignoreshell and cmd == 'pysh'):
367 return []
368 if arg is None:
369 return [cmd, win32_to_unix_path(path)]
370 return [cmd, arg, win32_to_unix_path(path)]
371 except IOError as e:
372 if e.errno!=errno.ENOENT and \
373 (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
374 raise
375 return []
376
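# Illustrative example (not part of the original file): for a script whose
# first line is "#!/usr/bin/env python", resolve_shebang() maps the
# interpreter through _SHEBANG_CMDS and returns ['env', 'python', <path>];
# a plain "#!/bin/sh" script yields ['pysh', <path>], or [] when
# ignoreshell=True.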
377def win32_find_in_path(name, path):
378 if isinstance(path, str):
379 path = path.split(os.pathsep)
380
381 exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
382 for p in path:
383 p_name = os.path.join(p, name)
384
385 prefix = resolve_shebang(p_name)
386 if prefix:
387 return prefix
388
389 for ext in exts:
390 p_name_ext = p_name + ext
391 if os.path.exists(p_name_ext):
392 return [win32_to_unix_path(p_name_ext)]
393 return []
394
395class Traps(dict):
396 def __setitem__(self, key, value):
397 if key not in ('EXIT',):
398 raise NotImplementedError()
399 super(Traps, self).__setitem__(key, value)
400
401# IFS white spaces character class
402_IFS_WHITESPACES = (' ', '\t', '\n')
403
404class Environment:
405 """Environment holds environment variables, export table, function
406 definitions and whatever is defined in 2.12 "Shell Execution Environment",
407 redirection excepted.
408 """
409 def __init__(self, pwd):
410 self._opt = set() #Shell options
411
412 self._functions = {}
413 self._env = {'?': '0', '#': '0'}
414 self._exported = set([
415 'HOME', 'IFS', 'PATH'
416 ])
417
418 # Set environment vars with side-effects
419 self._ifs_ws = None # Set of IFS whitespace characters
420 self._ifs_re = None # Regular expression used to split between words using IFS classes
421 self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
422 self['PWD'] = pwd
423 self.traps = Traps()
424
425 def clone(self, subshell=False):
426 env = Environment(self['PWD'])
427 env._opt = set(self._opt)
428 for k,v in self.get_variables().iteritems():
429 if k in self._exported:
430 env.export(k,v)
431 elif subshell:
432 env[k] = v
433
434 if subshell:
435 env._functions = dict(self._functions)
436
437 return env
438
439 def __getitem__(self, key):
440 if key in ('@', '*', '-', '$'):
441 raise NotImplementedError('%s is not implemented' % repr(key))
442 return self._env[key]
443
444 def get(self, key, defval=None):
445 try:
446 return self[key]
447 except KeyError:
448 return defval
449
450 def __setitem__(self, key, value):
451 if key=='IFS':
452 # Update the whitespace/non-whitespace classes
453 self._update_ifs(value)
454 elif key=='PWD':
455 pwd = os.path.abspath(value)
456 if not os.path.isdir(pwd):
457 raise VarAssignmentError('Invalid directory %s' % value)
458 value = pwd
459 elif key in ('?', '!'):
460 value = str(int(value))
461 self._env[key] = value
462
463 def __delitem__(self, key):
464 if key in ('IFS', 'PWD', '?'):
465 raise VarAssignmentError('%s cannot be unset' % key)
466 del self._env[key]
467
468 def __contains__(self, item):
469 return item in self._env
470
471 def set_positional_args(self, args):
472 """Set the content of 'args' as positional arguments from 1 to len(args).
473 Return the previous arguments as a list of strings.
474 """
475 # Save and remove previous arguments
476 prevargs = []
477 for i in xrange(int(self._env['#'])):
478 i = str(i+1)
479 prevargs.append(self._env[i])
480 del self._env[i]
481 self._env['#'] = '0'
482
483 #Set new ones
484 for i,arg in enumerate(args):
485 self._env[str(i+1)] = str(arg)
486 self._env['#'] = str(len(args))
487
488 return prevargs
489
490 def get_positional_args(self):
491 return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
492
493 def get_variables(self):
494 return dict(self._env)
495
496 def export(self, key, value=None):
497 if value is not None:
498 self[key] = value
499 self._exported.add(key)
500
501 def get_exported(self):
502 return [(k,self._env.get(k)) for k in self._exported]
503
504 def split_fields(self, word):
505 if not self._ifs_ws or not word:
506 return [word]
507 return re.split(self._ifs_re, word)
508
509 def _update_ifs(self, value):
510 """Update the split_fields related variables when IFS character set is
511 changed.
512 """
513 # TODO: handle NULL IFS
514
515 # Separate characters in whitespace and non-whitespace
516 chars = set(value)
517 ws = [c for c in chars if c in _IFS_WHITESPACES]
518 nws = [c for c in chars if c not in _IFS_WHITESPACES]
519
520 # Keep whitespaces in a string for left and right stripping
521 self._ifs_ws = ''.join(ws)
522
523 # Build a regexp to split fields
524 trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
525 if nws:
526 # First, match a single non-whitespace separator surrounded by optional whitespace.
527 nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
528 nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
529 else:
530 # Then mix all parts with quantifiers
531 nws = trailing + '+'
532 self._ifs_re = re.compile(nws)
533
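# Illustrative example (not part of the original file): with the default IFS
# of space, tab and newline, split_fields('a  b\tc') returns ['a', 'b', 'c'],
# since _ifs_re then matches any run of IFS whitespace characters.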
534 def has_opt(self, opt, val=None):
535 return (opt, val) in self._opt
536
537 def set_opt(self, opt, val=None):
538 self._opt.add((opt, val))
539
540 def find_in_path(self, name, pwd=False):
541 path = self._env.get('PATH', '').split(os.pathsep)
542 if pwd:
543 path[:0] = [self['PWD']]
544 if os.name == 'nt':
545 return win32_find_in_path(name, self._env.get('PATH', ''))
546 else:
547 raise NotImplementedError()
548
549 def define_function(self, name, body):
550 if not is_name(name):
551 raise ShellSyntaxError('%s is not a valid function name' % repr(name))
552 self._functions[name] = body
553
554 def remove_function(self, name):
555 del self._functions[name]
556
557 def is_function(self, name):
558 return name in self._functions
559
560 def get_function(self, name):
561 return self._functions.get(name)
562
563
564name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
565name_charset = dict(zip(name_charset,name_charset))
566
567def match_name(s):
568 """Return the longest prefix of s made entirely of characters that are
569 allowed in names.
570 """
571 for i,c in enumerate(s):
572 if c not in name_charset:
573 return s[:i]
574 return s
575
576def is_name(s):
577 return len([c for c in s if c not in name_charset])<=0
578
579def is_special_param(c):
580 return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
581
582def utility_not_implemented(name, *args, **kwargs):
583 raise NotImplementedError('%s utility is not implemented' % name)
584
585
586class Utility:
587 """Define utilities properties:
588 func -- utility callable. See builtin module for utility samples.
589 is_special -- see XCU 2.8.
590 """
591 def __init__(self, func, is_special=0):
592 self.func = func
593 self.is_special = bool(is_special)
594
595
596def encodeargs(args):
597 def encodearg(s):
598 lines = base64.encodestring(s)
599 lines = [l.splitlines()[0] for l in lines]
600 return ''.join(lines)
601
602 s = pickle.dumps(args)
603 return encodearg(s)
604
605def decodeargs(s):
606 s = base64.decodestring(s)
607 return pickle.loads(s)
608
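# Illustrative example (not part of the original file): encodeargs() pickles
# and base64-encodes an argument list into a single newline-free token and
# decodeargs() reverses it, so
#   decodeargs(encodeargs(['-c', 'echo hi'])) == ['-c', 'echo hi']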
609
610class GlobError(Exception):
611 pass
612
613class Options:
614 def __init__(self):
615 # True if Mercurial operates with binary streams
616 self.hgbinary = True
617
618class Interpreter:
619 # Implementation is very basic: the execute() method just makes a DFS on the
620 # AST and executes nodes one by one. Nodes are tuples (name, obj) where name
621 # is a string identifier and obj is the AST element returned by the parser.
622 #
623 # Handlers are named after the node identifiers.
624 # TODO: check node names and replace the switch in execute() with a dynamic
625 # getattr() call to find node handlers.
626 """Shell interpreter.
627
628 The following debugging flags can be passed:
629 debug-parsing - enable PLY debugging.
630 debug-tree - print the generated AST.
631 debug-cmd - trace command execution before word expansion, plus exit status.
632 debug-utility - trace utility execution.
633 """
634
635 # List supported commands.
636 COMMANDS = {
637 'cat': Utility(builtin.utility_cat,),
638 'cd': Utility(builtin.utility_cd,),
639 ':': Utility(builtin.utility_colon,),
640 'echo': Utility(builtin.utility_echo),
641 'env': Utility(builtin.utility_env),
642 'exit': Utility(builtin.utility_exit),
643 'export': Utility(builtin.builtin_export, is_special=1),
644 'egrep': Utility(builtin.utility_egrep),
645 'fgrep': Utility(builtin.utility_fgrep),
646 'gunzip': Utility(builtin.utility_gunzip),
647 'kill': Utility(builtin.utility_kill),
648 'mkdir': Utility(builtin.utility_mkdir),
649 'netstat': Utility(builtin.utility_netstat),
650 'printf': Utility(builtin.utility_printf),
651 'pwd': Utility(builtin.utility_pwd),
652 'return': Utility(builtin.builtin_return, is_special=1),
653 'sed': Utility(builtin.utility_sed,),
654 'set': Utility(builtin.builtin_set,),
655 'shift': Utility(builtin.builtin_shift,),
656 'sleep': Utility(builtin.utility_sleep,),
657 'sort': Utility(builtin.utility_sort,),
658 'trap': Utility(builtin.builtin_trap, is_special=1),
659 'true': Utility(builtin.utility_true),
660 'unset': Utility(builtin.builtin_unset, is_special=1),
661 'wait': Utility(builtin.builtin_wait, is_special=1),
662 }
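# Further utilities can be exposed the same way; for instance a hypothetical
# entry could map a not-yet-supported name to the helper defined earlier:
#   'ln': Utility(utility_not_implemented),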
663
664 def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
665 stdout=None, stderr=None, opts=Options()):
666 self._env = env
667 if self._env is None:
668 self._env = Environment(pwd)
669 self._children = {}
670
671 self._redirs = redirs
672 self._close_redirs = False
673
674 if self._redirs is None:
675 if stdin is None:
676 stdin = sys.stdin
677 if stdout is None:
678 stdout = sys.stdout
679 if stderr is None:
680 stderr = sys.stderr
681 stdin = FileWrapper('r', stdin, False)
682 stdout = FileWrapper('w', stdout, False)
683 stderr = FileWrapper('w', stderr, False)
684 self._redirs = Redirections(stdin, stdout, stderr)
685 self._close_redirs = True
686
687 self._debugflags = list(debugflags)
688 self._logfile = sys.stderr
689 self._options = opts
690
691 def close(self):
692 """Must be called when the interpreter is no longer used."""
693 script = self._env.traps.get('EXIT')
694 if script:
695 try:
696 self.execute_script(script=script)
697 except:
698 pass
699
700 if self._redirs is not None and self._close_redirs:
701 self._redirs.close()
702 self._redirs = None
703
704 def log(self, s):
705 self._logfile.write(s)
706 self._logfile.flush()
707
708 def __getitem__(self, key):
709 return self._env[key]
710
711 def __setitem__(self, key, value):
712 self._env[key] = value
713
714 def options(self):
715 return self._options
716
717 def redirect(self, redirs, ios):
718 def add_redir(io):
719 if isinstance(io, pyshyacc.IORedirect):
720 redirs.add(self, io.op, io.filename, io.io_number)
721 else:
722 redirs.add_here_document(self, io.name, io.content, io.io_number)
723
724 map(add_redir, ios)
725 return redirs
726
727 def execute_script(self, script=None, ast=None, sourced=False,
728 scriptpath=None):
729 """If script is not None, parse it. Otherwise take the supplied
730 AST. Then execute the AST.
731 Return the script exit status.
732 """
733 try:
734 if scriptpath is not None:
735 self._env['0'] = os.path.abspath(scriptpath)
736
737 if script is not None:
738 debug_parsing = ('debug-parsing' in self._debugflags)
739 cmds, script = pyshyacc.parse(script, True, debug_parsing)
740 if 'debug-tree' in self._debugflags:
741 pyshyacc.print_commands(cmds, self._logfile)
742 self._logfile.flush()
743 else:
744 cmds, script = ast, ''
745
746 status = 0
747 for cmd in cmds:
748 try:
749 status = self.execute(cmd)
750 except ExitSignal as e:
751 if sourced:
752 raise
753 status = int(e.args[0])
754 return status
755 except ShellError:
756 self._env['?'] = 1
757 raise
758 if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
759 self.log('returncode ' + str(status)+ '\n')
760 return status
761 except CommandNotFound as e:
762 print >>self._redirs.stderr(), str(e)
763 self._redirs.stderr().flush()
764 # Command not found by non-interactive shell
765 # return 127
766 raise
767 except RedirectionError as e:
768 # TODO: should be handled depending on the utility status
769 print >>self._redirs.stderr(), str(e)
770 self._redirs.stderr().flush()
771 # Command not found by non-interactive shell
772 # return 127
773 raise
774
775 def dotcommand(self, env, args):
776 if len(args) < 1:
777 raise ShellError('. expects at least one argument')
778 path = args[0]
779 if '/' not in path:
780 found = env.find_in_path(args[0], True)
781 if found:
782 path = found[0]
783 script = file(path).read()
784 return self.execute_script(script=script, sourced=True)
785
786 def execute(self, token, redirs=None):
787 """Execute an AST subtree with supplied redirections overriding default
788 interpreter ones.
789 Return the exit status.
790 """
791 if not token:
792 return 0
793
794 if redirs is None:
795 redirs = self._redirs
796
797 if isinstance(token, list):
798 # Commands sequence
799 res = 0
800 for t in token:
801 res = self.execute(t, redirs)
802 return res
803
804 type, value = token
805 status = 0
806 if type=='simple_command':
807 redirs_copy = redirs.clone()
808 try:
809 # TODO: define and handle command return values
810 # TODO: implement set -e
811 status = self._execute_simple_command(value, redirs_copy)
812 finally:
813 redirs_copy.close()
814 elif type=='pipeline':
815 status = self._execute_pipeline(value, redirs)
816 elif type=='and_or':
817 status = self._execute_and_or(value, redirs)
818 elif type=='for_clause':
819 status = self._execute_for_clause(value, redirs)
820 elif type=='while_clause':
821 status = self._execute_while_clause(value, redirs)
822 elif type=='function_definition':
823 status = self._execute_function_definition(value, redirs)
824 elif type=='brace_group':
825 status = self._execute_brace_group(value, redirs)
826 elif type=='if_clause':
827 status = self._execute_if_clause(value, redirs)
828 elif type=='subshell':
829 status = self.subshell(ast=value.cmds, redirs=redirs)
830 elif type=='async':
831 status = self._asynclist(value)
832 elif type=='redirect_list':
833 redirs_copy = self.redirect(redirs.clone(), value.redirs)
834 try:
835 status = self.execute(value.cmd, redirs_copy)
836 finally:
837 redirs_copy.close()
838 else:
839 raise NotImplementedError('Unsupported token type ' + type)
840
841 if status < 0:
842 status = 255
843 return status
844
845 def _execute_if_clause(self, if_clause, redirs):
846 cond_status = self.execute(if_clause.cond, redirs)
847 if cond_status==0:
848 return self.execute(if_clause.if_cmds, redirs)
849 else:
850 return self.execute(if_clause.else_cmds, redirs)
851
852 def _execute_brace_group(self, group, redirs):
853 status = 0
854 for cmd in group.cmds:
855 status = self.execute(cmd, redirs)
856 return status
857
858 def _execute_function_definition(self, fundef, redirs):
859 self._env.define_function(fundef.name, fundef.body)
860 return 0
861
862 def _execute_while_clause(self, while_clause, redirs):
863 status = 0
864 while 1:
865 cond_status = 0
866 for cond in while_clause.condition:
867 cond_status = self.execute(cond, redirs)
868
869 if cond_status:
870 break
871
872 for cmd in while_clause.cmds:
873 status = self.execute(cmd, redirs)
874
875 return status
876
877 def _execute_for_clause(self, for_clause, redirs):
878 if not is_name(for_clause.name):
879 raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
880 items = mappend(self.expand_token, for_clause.items)
881
882 status = 0
883 for item in items:
884 self._env[for_clause.name] = item
885 for cmd in for_clause.cmds:
886 status = self.execute(cmd, redirs)
887 return status
888
889 def _execute_and_or(self, or_and, redirs):
890 res = self.execute(or_and.left, redirs)
891 if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
892 res = self.execute(or_and.right, redirs)
893 return res
894
895 def _execute_pipeline(self, pipeline, redirs):
896 if len(pipeline.commands)==1:
897 status = self.execute(pipeline.commands[0], redirs)
898 else:
899 # Execute all commands one after the other
900 status = 0
901 inpath, outpath = None, None
902 try:
903 # Command inputs and outputs cannot really be plugged together as done
904 # by a real shell. Run commands sequentially and chain their
905 # input/output through temporary files.
906 tmpfd, inpath = tempfile.mkstemp()
907 os.close(tmpfd)
908 tmpfd, outpath = tempfile.mkstemp()
909 os.close(tmpfd)
910
911 inpath = win32_to_unix_path(inpath)
912 outpath = win32_to_unix_path(outpath)
913
914 for i, cmd in enumerate(pipeline.commands):
915 call_redirs = redirs.clone()
916 try:
917 if i!=0:
918 call_redirs.add(self, '<', inpath)
919 if i!=len(pipeline.commands)-1:
920 call_redirs.add(self, '>', outpath)
921
922 status = self.execute(cmd, call_redirs)
923
924 # Chain inputs/outputs
925 inpath, outpath = outpath, inpath
926 finally:
927 call_redirs.close()
928 finally:
929 if inpath: os.remove(inpath)
930 if outpath: os.remove(outpath)
931
932 if pipeline.reverse_status:
933 status = int(not status)
934 self._env['?'] = status
935 return status
936
937 def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
938 assert interp is self
939
940 func = env.get_function(name)
941 #Set positional parameters
942 prevargs = None
943 try:
944 prevargs = env.set_positional_args(args)
945 try:
946 redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
947 try:
948 status = self.execute(func, redirs)
949 finally:
950 redirs.close()
951 except ReturnSignal as e:
952 status = int(e.args[0])
953 env['?'] = status
954 return status
955 finally:
956 #Reset positional parameters
957 if prevargs is not None:
958 env.set_positional_args(prevargs)
959
960 def _execute_simple_command(self, token, redirs):
961 """Can raise ReturnSignal when return builtin is called, ExitSignal when
962 exit is called, and other shell exceptions upon builtin failures.
963 """
964 debug_command = 'debug-cmd' in self._debugflags
965 if debug_command:
966 self.log('word' + repr(token.words) + '\n')
967 self.log('assigns' + repr(token.assigns) + '\n')
968 self.log('redirs' + repr(token.redirs) + '\n')
969
970 is_special = None
971 env = self._env
972
973 try:
974 # Word expansion
975 args = []
976 for word in token.words:
977 args += self.expand_token(word)
978 if is_special is None and args:
979 is_special = env.is_function(args[0]) or \
980 (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
981
982 if debug_command:
983 self.log('_execute_simple_command' + str(args) + '\n')
984
985 if not args:
986 # Redirections happen in a subshell
987 redirs = redirs.clone()
988 elif not is_special:
989 env = self._env.clone()
990
991 # Redirections
992 self.redirect(redirs, token.redirs)
993
994 # Variables assignments
995 res = 0
996 for type,(k,v) in token.assigns:
997 status, expanded = self.expand_variable((k,v))
998 if status is not None:
999 res = status
1000 if args:
1001 env.export(k, expanded)
1002 else:
1003 env[k] = expanded
1004
1005 if args and args[0] in ('.', 'source'):
1006 res = self.dotcommand(env, args[1:])
1007 elif args:
1008 if args[0] in self.COMMANDS:
1009 command = self.COMMANDS[args[0]]
1010 elif env.is_function(args[0]):
1011 command = Utility(self._execute_function, is_special=True)
1012 else:
1013 if '/' not in args[0].replace('\\', '/'):
1014 cmd = env.find_in_path(args[0])
1015 if not cmd:
1016 # TODO: test error code on unknown command => 127
1017 raise CommandNotFound('Unknown command: "%s"' % args[0])
1018 else:
1019 # Handle commands like '/cygdrive/c/foo.bat'
1020 cmd = cygwin_to_windows_path(args[0])
1021 if not os.path.exists(cmd):
1022 raise CommandNotFound('%s: No such file or directory' % args[0])
1023 shebang = resolve_shebang(cmd)
1024 if shebang:
1025 cmd = shebang
1026 else:
1027 cmd = [cmd]
1028 args[0:1] = cmd
1029 command = Utility(builtin.run_command)
1030
1031 # Command execution
1032 if 'debug-cmd' in self._debugflags:
1033 self.log('redirections ' + str(redirs) + '\n')
1034
1035 res = command.func(args[0], args[1:], self, env,
1036 redirs.stdin(), redirs.stdout(),
1037 redirs.stderr(), self._debugflags)
1038
1039 if self._env.has_opt('-x'):
1040 # Trace command execution in shell environment
1041 # BUG: would be hard to reproduce a real shell behaviour since
1042 # the AST is not annotated with source lines/tokens.
1043 self._redirs.stdout().write(' '.join(args))
1044
1045 except ReturnSignal:
1046 raise
1047 except ShellError as e:
1048 if is_special or isinstance(e, (ExitSignal,
1049 ShellSyntaxError, ExpansionError)):
1050 raise e
1051 self._redirs.stderr().write(str(e)+'\n')
1052 return 1
1053
1054 return res
1055
1056 def expand_token(self, word):
1057 """Expand a word as specified in [2.6 Word Expansions]. Return the list
1058 of expanded words.
1059 """
1060 status, wtrees = self._expand_word(word)
1061 return map(pyshlex.wordtree_as_string, wtrees)
1062
1063 def expand_variable(self, word):
1064 """Return a status code (or None if no command expansion occurred)
1065 and a single word.
1066 """
1067 status, wtrees = self._expand_word(word, pathname=False, split=False)
1068 words = map(pyshlex.wordtree_as_string, wtrees)
1069 assert len(words)==1
1070 return status, words[0]
1071
1072 def expand_here_document(self, word):
1073 """Return the expanded document as a single word. The here document is
1074 assumed to be unquoted.
1075 """
1076 status, wtrees = self._expand_word(word, pathname=False,
1077 split=False, here_document=True)
1078 words = map(pyshlex.wordtree_as_string, wtrees)
1079 assert len(words)==1
1080 return words[0]
1081
1082 def expand_redirection(self, word):
1083 """Return a single word."""
1084 return self.expand_variable(word)[1]
1085
1086 def get_env(self):
1087 return self._env
1088
1089 def _expand_word(self, token, pathname=True, split=True, here_document=False):
1090 wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
1091
1092 # TODO: implement tilde expansion
1093 def expand(wtree):
1094 """Return a pseudo wordtree: the tree or its subelements can be empty
1095 lists when no value results from the expansion.
1096 """
1097 status = None
1098 for part in wtree:
1099 if not isinstance(part, list):
1100 continue
1101 if part[0]in ("'", '\\'):
1102 continue
1103 elif part[0] in ('`', '$('):
1104 status, result = self._expand_command(part)
1105 part[:] = result
1106 elif part[0] in ('$', '${'):
1107 part[:] = self._expand_parameter(part, wtree[0]=='"', split)
1108 elif part[0] in ('', '"'):
1109 status, result = expand(part)
1110 part[:] = result
1111 else:
1112 raise NotImplementedError('%s expansion is not implemented'
1113 % part[0])
1114 # [] is returned when an expansion results in no field,
1115 # like an empty $@
1116 wtree = [p for p in wtree if p != []]
1117 if len(wtree) < 3:
1118 return status, []
1119 return status, wtree
1120
1121 status, wtree = expand(wtree)
1122 if len(wtree) == 0:
1123 return status, wtree
1124 wtree = pyshlex.normalize_wordtree(wtree)
1125
1126 if split:
1127 wtrees = self._split_fields(wtree)
1128 else:
1129 wtrees = [wtree]
1130
1131 if pathname:
1132 wtrees = mappend(self._expand_pathname, wtrees)
1133
1134 wtrees = map(self._remove_quotes, wtrees)
1135 return status, wtrees
1136
1137 def _expand_command(self, wtree):
1138 # BUG: there is something to do with backslashes and quoted
1139 # characters here
1140 command = pyshlex.wordtree_as_string(wtree[1:-1])
1141 status, output = self.subshell_output(command)
1142 return status, ['', output, '']
1143
1144 def _expand_parameter(self, wtree, quoted=False, split=False):
1145 """Return a valid wtree or an empty list when no parameter results."""
1146 # Get the parameter name
1147 # TODO: implement weird expansion rules with ':'
1148 name = pyshlex.wordtree_as_string(wtree[1:-1])
1149 if not is_name(name) and not is_special_param(name):
1150 raise ExpansionError('Bad substitution "%s"' % name)
1151 # TODO: implement special parameters
1152 if name in ('@', '*'):
1153 args = self._env.get_positional_args()
1154 if len(args) == 0:
1155 return []
1156 if len(args)<2:
1157 return ['', ''.join(args), '']
1158
1159 sep = self._env.get('IFS', '')[:1]
1160 if split and quoted and name=='@':
1161 # Introduce a new token to tell the caller that these parameters
1162 # cause a split as specified in 2.5.2
1163 return ['@'] + args + ['']
1164 else:
1165 return ['', sep.join(args), '']
1166
1167 return ['', self._env.get(name, ''), '']
1168
1169 def _split_fields(self, wtree):
1170 def is_empty(split):
1171 return split==['', '', '']
1172
1173 def split_positional(quoted):
1174 # Return a list of wtrees split according to positional parameter rules.
1175 # All remaining '@' groups are removed.
1176 assert quoted[0]=='"'
1177
1178 splits = [[]]
1179 for part in quoted:
1180 if not isinstance(part, list) or part[0]!='@':
1181 splits[-1].append(part)
1182 else:
1183 # Empty or single-argument lists were dealt with already
1184 assert len(part)>3
1185 # First argument must join with the beginning part of the original word
1186 splits[-1].append(part[1])
1187 # Create double-quotes expressions for every argument after the first
1188 for arg in part[2:-1]:
1189 splits[-1].append('"')
1190 splits.append(['"', arg])
1191 return splits
1192
1193 # At this point, all expansions but pathnames have occurred. Only quoted
1194 # and positional sequences remain. Thus, all candidates for field splitting
1195 # are in the tree root, or are positional splits ('@') and lie in root
1196 # children.
1197 if not wtree or wtree[0] not in ('', '"'):
1198 # The whole token is quoted or empty, nothing to split
1199 return [wtree]
1200
1201 if wtree[0]=='"':
1202 wtree = ['', wtree, '']
1203
1204 result = [['', '']]
1205 for part in wtree[1:-1]:
1206 if isinstance(part, list):
1207 if part[0]=='"':
1208 splits = split_positional(part)
1209 if len(splits)<=1:
1210 result[-1] += [part, '']
1211 else:
1212 # Terminate the current split
1213 result[-1] += [splits[0], '']
1214 result += splits[1:-1]
1215 # Create a new split
1216 result += [['', splits[-1], '']]
1217 else:
1218 result[-1] += [part, '']
1219 else:
1220 splits = self._env.split_fields(part)
1221 if len(splits)<=1:
1222 # No split
1223 result[-1][-1] += part
1224 else:
1225 # Terminate the current resulting part and create a new one
1226 result[-1][-1] += splits[0]
1227 result[-1].append('')
1228 result += [['', r, ''] for r in splits[1:-1]]
1229 result += [['', splits[-1]]]
1230 result[-1].append('')
1231
1232 # Leading and trailing empty groups come from leading/trailing blanks
1233 if result and is_empty(result[-1]):
1234 result[-1:] = []
1235 if result and is_empty(result[0]):
1236 result[:1] = []
1237 return result
1238
1239 def _expand_pathname(self, wtree):
1240 """See [2.6.6 Pathname Expansion]."""
1241 if self._env.has_opt('-f'):
1242 return [wtree]
1243
1244 # All expansions have been performed, only quoted sequences should remain
1245 # in the tree. Generate the pattern by folding the tree, escaping special
1246 # characters when they appear quoted
1247 special_chars = '*?[]'
1248
1249 def make_pattern(wtree):
1250 subpattern = []
1251 for part in wtree[1:-1]:
1252 if isinstance(part, list):
1253 part = make_pattern(part)
1254 elif wtree[0]!='':
1255 for c in part:
1256 # Meta-characters cannot be quoted
1257 if c in special_chars:
1258 raise GlobError()
1259 subpattern.append(part)
1260 return ''.join(subpattern)
1261
1262 def pwd_glob(pattern):
1263 cwd = os.getcwd()
1264 os.chdir(self._env['PWD'])
1265 try:
1266 return glob.glob(pattern)
1267 finally:
1268 os.chdir(cwd)
1269
1270 #TODO: check working directory issues here wrt relative patterns
1271 try:
1272 pattern = make_pattern(wtree)
1273 paths = pwd_glob(pattern)
1274 except GlobError:
1275 # BUG: Meta-characters were found in quoted sequences. They should
1276 # have been used literally but this is unsupported by the current glob module.
1277 # Instead we consider the whole tree must be used literally and
1278 # therefore there is no point in globbing. This is wrong when meta
1279 # characters are mixed with quoted meta in the same pattern like:
1280 # < foo*"py*" >
1281 paths = []
1282
1283 if not paths:
1284 return [wtree]
1285 return [['', path, ''] for path in paths]
1286
1287 def _remove_quotes(self, wtree):
1288 """See [2.6.7 Quote Removal]."""
1289
1290 def unquote(wtree):
1291 unquoted = []
1292 for part in wtree[1:-1]:
1293 if isinstance(part, list):
1294 part = unquote(part)
1295 unquoted.append(part)
1296 return ''.join(unquoted)
1297
1298 return ['', unquote(wtree), '']
1299
1300 def subshell(self, script=None, ast=None, redirs=None):
1301 """Execute the script or AST in a subshell, with inherited redirections
1302 if redirs is not None.
1303 """
1304 if redirs:
1305 sub_redirs = redirs
1306 else:
1307 sub_redirs = self._redirs.clone()
1308
1309 subshell = None
1310 try:
1311 subshell = Interpreter(None, self._debugflags, self._env.clone(True),
1312 sub_redirs, opts=self._options)
1313 return subshell.execute_script(script, ast)
1314 finally:
1315 if not redirs: sub_redirs.close()
1316 if subshell: subshell.close()
1317
1318 def subshell_output(self, script):
1319 """Execute the script in a subshell and return the captured output."""
1320 # Create temporary file to capture subshell output
1321 tmpfd, tmppath = tempfile.mkstemp()
1322 try:
1323 tmpfile = os.fdopen(tmpfd, 'wb')
1324 stdout = FileWrapper('w', tmpfile)
1325
1326 redirs = Redirections(self._redirs.stdin().dup(),
1327 stdout,
1328 self._redirs.stderr().dup())
1329 try:
1330 status = self.subshell(script=script, redirs=redirs)
1331 finally:
1332 redirs.close()
1333 redirs = None
1334
1335 # Extract subshell standard output
1336 tmpfile = open(tmppath, 'rb')
1337 try:
1338 output = tmpfile.read()
1339 return status, output.rstrip('\n')
1340 finally:
1341 tmpfile.close()
1342 finally:
1343 os.remove(tmppath)
1344
1345 def _asynclist(self, cmd):
1346 args = (self._env.get_variables(), cmd)
1347 arg = encodeargs(args)
1348 assert len(arg) < 30*1024
1349 cmd = ['pysh.bat', '--ast', '-c', arg]
1350 p = subprocess.Popen(cmd, cwd=self._env['PWD'])
1351 self._children[p.pid] = p
1352 self._env['!'] = p.pid
1353 return 0
1354
1355 def wait(self, pids=None):
1356 if not pids:
1357 pids = self._children.keys()
1358
1359 status = 127
1360 for pid in pids:
1361 if pid not in self._children:
1362 continue
1363 p = self._children.pop(pid)
1364 status = p.wait()
1365
1366 return status
1367
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
new file mode 100644
index 0000000000..b1831c22a7
--- /dev/null
+++ b/bitbake/lib/bb/pysh/lsprof.py
@@ -0,0 +1,116 @@
1#! /usr/bin/env python
2
3import sys
4from _lsprof import Profiler, profiler_entry
5
6__all__ = ['profile', 'Stats']
7
8def profile(f, *args, **kwds):
9 """XXX docstring"""
10 p = Profiler()
11 p.enable(subcalls=True, builtins=True)
12 try:
13 f(*args, **kwds)
14 finally:
15 p.disable()
16 return Stats(p.getstats())
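# Rough usage sketch (some_callable and its arguments are placeholders),
# similar to the __main__ block at the end of this file:
#   stats = profile(some_callable, arg1, arg2)
#   stats.sort('inlinetime')
#   stats.pprint(top=10, file=sys.stderr)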
17
18
19class Stats(object):
20 """XXX docstring"""
21
22 def __init__(self, data):
23 self.data = data
24
25 def sort(self, crit="inlinetime"):
26 """XXX docstring"""
27 if crit not in profiler_entry.__dict__:
28 raise ValueError("Can't sort by %s" % crit)
29 self.data.sort(lambda b, a: cmp(getattr(a, crit),
30 getattr(b, crit)))
31 for e in self.data:
32 if e.calls:
33 e.calls.sort(lambda b, a: cmp(getattr(a, crit),
34 getattr(b, crit)))
35
36 def pprint(self, top=None, file=None, limit=None, climit=None):
37 """XXX docstring"""
38 if file is None:
39 file = sys.stdout
40 d = self.data
41 if top is not None:
42 d = d[:top]
43 cols = "% 12s %12s %11.4f %11.4f %s\n"
44 hcols = "% 12s %12s %12s %12s %s\n"
45 cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
46 file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
47 "Inline(ms)", "module:lineno(function)"))
48 count = 0
49 for e in d:
50 file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
51 e.inlinetime, label(e.code)))
52 count += 1
53 if limit is not None and count == limit:
54 return
55 ccount = 0
56 if e.calls:
57 for se in e.calls:
58 file.write(cols % ("+%s" % se.callcount, se.reccallcount,
59 se.totaltime, se.inlinetime,
60 "+%s" % label(se.code)))
61 count += 1
62 ccount += 1
63 if limit is not None and count == limit:
64 return
65 if climit is not None and ccount == climit:
66 break
67
68 def freeze(self):
69 """Replace all references to code objects with string
70 descriptions; this makes it possible to pickle the instance."""
71
72 # this code is probably rather ickier than it needs to be!
73 for i in range(len(self.data)):
74 e = self.data[i]
75 if not isinstance(e.code, str):
76 self.data[i] = type(e)((label(e.code),) + e[1:])
77 if e.calls:
78 for j in range(len(e.calls)):
79 se = e.calls[j]
80 if not isinstance(se.code, str):
81 e.calls[j] = type(se)((label(se.code),) + se[1:])
82
83_fn2mod = {}
84
85def label(code):
86 if isinstance(code, str):
87 return code
88 try:
89 mname = _fn2mod[code.co_filename]
90 except KeyError:
91 for k, v in sys.modules.items():
92 if v is None:
93 continue
94 if not hasattr(v, '__file__'):
95 continue
96 if not isinstance(v.__file__, str):
97 continue
98 if v.__file__.startswith(code.co_filename):
99 mname = _fn2mod[code.co_filename] = k
100 break
101 else:
102 mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
103
104 return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
105
106
107if __name__ == '__main__':
108 import os
109 sys.argv = sys.argv[1:]
110 if not sys.argv:
111 print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
112 sys.exit(2)
113 sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
114 stats = profile(execfile, sys.argv[0], globals(), locals())
115 stats.sort()
116 stats.pprint()
diff --git a/bitbake/lib/bb/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py
new file mode 100644
index 0000000000..b4e6145b51
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pysh.py
@@ -0,0 +1,167 @@
1# pysh.py - command processing for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8import optparse
9import os
10import sys
11
12import interp
13
14SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
15SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
16 help='A string that shall be interpreted by the shell as one or more commands')
17SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
18 help='Redirect script commands stdout and stderr to the specified file')
19# See utility_command in builtin.py about the reason for this flag.
20SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
21 help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
22SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
23 help='Trace PLY execution')
24SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
25 help='Display the generated syntax tree.')
26SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
27 help='Trace command execution before parameter expansion, plus exit status.')
28SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
29 help='Trace utility calls, after parameter expansion')
30SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
31 help='Encoded commands to execute in a subprocess')
32SH_OPT.add_option('--profile', action='store_true', default=False,
33 help='Profile pysh run')
34
35
36def split_args(args):
37 # Separate shell arguments from command ones
38 # Just stop at the first argument not starting with a dash. I know, this is completely broken:
39 # it ignores files starting with a dash and may mistake option values for the command file. This is not
40 # supposed to happen for now.
41 command_index = len(args)
42 for i,arg in enumerate(args):
43 if not arg.startswith('-'):
44 command_index = i
45 break
46
47 return args[:command_index], args[command_index:]
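# For example, roughly:
#   split_args(['--debug-cmd', '-c', 'echo hi'])
#   # -> (['--debug-cmd', '-c'], ['echo hi'])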
48
49
50def fixenv(env):
51 path = env.get('PATH')
52 if path is not None:
53 parts = path.split(os.pathsep)
54 # Remove Windows utilities from PATH, they are useless at best and
55 # some of them (find) may be confused with other utilities.
56 parts = [p for p in parts if 'system32' not in p.lower()]
57 env['PATH'] = os.pathsep.join(parts)
58 if env.get('HOME') is None:
59 # Several utilities, including cvsps, cannot work without
60 # a defined HOME directory.
61 env['HOME'] = os.path.expanduser('~')
62 return env
63
64def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
65 if os.environ.get('PYSH_TEXT') != '1':
66 import msvcrt
67 for fp in (sys.stdin, sys.stdout, sys.stderr):
68 msvcrt.setmode(fp.fileno(), os.O_BINARY)
69
70 hgbin = os.environ.get('PYSH_HGTEXT') != '1'
71
72 if debugflags is None:
73 debugflags = []
74 if options.debug_parsing: debugflags.append('debug-parsing')
75 if options.debug_utility: debugflags.append('debug-utility')
76 if options.debug_cmd: debugflags.append('debug-cmd')
77 if options.debug_tree: debugflags.append('debug-tree')
78
79 if env is None:
80 env = fixenv(dict(os.environ))
81 if cwd is None:
82 cwd = os.getcwd()
83
84 if not cmdargs:
85 # Nothing to do
86 return 0
87
88 ast = None
89 command_file = None
90 if options.command_string:
91 input = cmdargs[0]
92 if not options.ast:
93 input += '\n'
94 else:
95 args, input = interp.decodeargs(input), None
96 env, ast = args
97 cwd = env.get('PWD', cwd)
98 else:
99 command_file = cmdargs[0]
100 arguments = cmdargs[1:]
101
102 prefix = interp.resolve_shebang(command_file, ignoreshell=True)
103 if prefix:
104 input = ' '.join(prefix + [command_file] + arguments)
105 else:
106 # Read commands from file
107 f = file(command_file)
108 try:
109 # Trailing newline to help the parser
110 input = f.read() + '\n'
111 finally:
112 f.close()
113
114 redirect = None
115 try:
116 if options.redirected:
117 stdout = sys.stdout
118 stderr = stdout
119 elif options.redirect_to:
120 redirect = open(options.redirect_to, 'wb')
121 stdout = redirect
122 stderr = redirect
123 else:
124 stdout = sys.stdout
125 stderr = sys.stderr
126
127 # TODO: set arguments to environment variables
128 opts = interp.Options()
129 opts.hgbinary = hgbin
130 ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
131 opts=opts)
132 try:
133 # Export given environment in shell object
134 for k,v in env.iteritems():
135 ip.get_env().export(k,v)
136 return ip.execute_script(input, ast, scriptpath=command_file)
137 finally:
138 ip.close()
139 finally:
140 if redirect is not None:
141 redirect.close()
142
143def sh(cwd=None, args=None, debugflags=None, env=None):
144 if args is None:
145 args = sys.argv[1:]
146 shargs, cmdargs = split_args(args)
147 options, shargs = SH_OPT.parse_args(shargs)
148
149 if options.profile:
150 import lsprof
151 p = lsprof.Profiler()
152 p.enable(subcalls=True)
153 try:
154 return _sh(cwd, shargs, cmdargs, options, debugflags, env)
155 finally:
156 p.disable()
157 stats = lsprof.Stats(p.getstats())
158 stats.sort()
159 stats.pprint(top=10, file=sys.stderr, climit=5)
160 else:
161 return _sh(cwd, shargs, cmdargs, options, debugflags, env)
162
163def main():
164 sys.exit(sh())
165
166if __name__=='__main__':
167 main()
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py
new file mode 100644
index 0000000000..b30123675c
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pyshlex.py
@@ -0,0 +1,888 @@
1# pyshlex.py - PLY compatible lexer for pysh.
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8# TODO:
9# - review all "char in 'abc'" snippets: the empty string can be matched
10# - test line continuations within quoted/expansion strings
11# - eof is buggy wrt sublexers
12# - the lexer cannot really work in pull mode as it would be required to run
13# PLY in pull mode. It was designed to work incrementally and it would not be
14# that hard to enable pull mode.
15import re
16try:
17 s = set()
18 del s
19except NameError:
20 from Set import Set as set
21
22from ply import lex
23from sherrors import *
24
25class NeedMore(Exception):
26 pass
27
28def is_blank(c):
29 return c in (' ', '\t')
30
31_RE_DIGITS = re.compile(r'^\d+$')
32
33def are_digits(s):
34 return _RE_DIGITS.search(s) is not None
35
36_OPERATORS = dict([
37 ('&&', 'AND_IF'),
38 ('||', 'OR_IF'),
39 (';;', 'DSEMI'),
40 ('<<', 'DLESS'),
41 ('>>', 'DGREAT'),
42 ('<&', 'LESSAND'),
43 ('>&', 'GREATAND'),
44 ('<>', 'LESSGREAT'),
45 ('<<-', 'DLESSDASH'),
46 ('>|', 'CLOBBER'),
47 ('&', 'AMP'),
48 (';', 'COMMA'),
49 ('<', 'LESS'),
50 ('>', 'GREATER'),
51 ('(', 'LPARENS'),
52 (')', 'RPARENS'),
53])
54
55#Make a function to silence pychecker "Local variable shadows global"
56def make_partial_ops():
57 partials = {}
58 for k in _OPERATORS:
59 for i in range(1, len(k)+1):
60 partials[k[:i]] = None
61 return partials
62
63_PARTIAL_OPERATORS = make_partial_ops()
64
65def is_partial_op(s):
66 """Return True if s matches a non-empty subpart of an operator starting
67 at its first character.
68 """
69 return s in _PARTIAL_OPERATORS
70
71def is_op(s):
72 """If s matches an operator, returns the operator identifier. Return None
73 otherwise.
74 """
75 return _OPERATORS.get(s)
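# For instance:
#   is_partial_op('<')   # -> True ('<' starts '<', '<<', '<&', '<>' and '<<-')
#   is_op('&&')          # -> 'AND_IF'
#   is_op('<<<')         # -> None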
76
77_RESERVEDS = dict([
78 ('if', 'If'),
79 ('then', 'Then'),
80 ('else', 'Else'),
81 ('elif', 'Elif'),
82 ('fi', 'Fi'),
83 ('do', 'Do'),
84 ('done', 'Done'),
85 ('case', 'Case'),
86 ('esac', 'Esac'),
87 ('while', 'While'),
88 ('until', 'Until'),
89 ('for', 'For'),
90 ('{', 'Lbrace'),
91 ('}', 'Rbrace'),
92 ('!', 'Bang'),
93 ('in', 'In'),
94 ('|', 'PIPE'),
95])
96
97def get_reserved(s):
98 return _RESERVEDS.get(s)
99
100_RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$')
101
102def is_name(s):
103 return _RE_NAME.search(s) is not None
104
105def find_chars(seq, chars):
106 for i,v in enumerate(seq):
107 if v in chars:
108 return i,v
109 return -1, None
110
111class WordLexer:
112 """WordLexer parses quoted or expansion expressions and returns an expression
113 tree. The input string can be any well-formed sequence beginning with a quoting
114 or expansion character. Embedded expressions are handled recursively. The
115 resulting tree is made of lists and strings. Lists represent quoted or
116 expansion expressions. Each list's first element is the opening separator and
117 its last one the closing separator. In between can be any number of strings
118 or lists for sub-expressions. Non-quoted/expansion expressions can be written as
119 strings or as lists with empty strings as starting and ending delimiters.
120 """
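# For example, feeding a double-quoted word containing a parameter expansion
# gives, roughly:
#   WordLexer().add('"a $b" rest', eof=True)
#   # -> (['"', 'a ', ['$', 'b', ''], '', '"'], ' rest')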
121
122 NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
123 NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET))
124
125 SPECIAL_CHARSET = '@*#?-$!0'
126
127 #Characters which can be escaped depend on the current delimiters
128 ESCAPABLE = {
129 '`': set(['$', '\\', '`']),
130 '"': set(['$', '\\', '`', '"']),
131 "'": set(),
132 }
133
134 def __init__(self, heredoc = False):
135 # _buffer is the unprocessed input characters buffer
136 self._buffer = []
137 # _stack is empty or contains a quoted list being processed
138 # (this is the DFS path to the quoted expression being evaluated).
139 self._stack = []
140 self._escapable = None
141 # True when parsing unquoted here documents
142 self._heredoc = heredoc
143
144 def add(self, data, eof=False):
145 """Feed the lexer with more data. If the quoted expression can be
146 delimited, return a tuple (expr, remaining) containing the expression
147 tree and the unconsumed data.
148 Otherwise, raise NeedMore.
149 """
150 self._buffer += list(data)
151 self._parse(eof)
152
153 result = self._stack[0]
154 remaining = ''.join(self._buffer)
155 self._stack = []
156 self._buffer = []
157 return result, remaining
158
159 def _is_escapable(self, c, delim=None):
160 if delim is None:
161 if self._heredoc:
162 # Backslashes work as if they were double quoted in unquoted
163 # here-documents
164 delim = '"'
165 else:
166 if len(self._stack)<=1:
167 return True
168 delim = self._stack[-2][0]
169
170 escapables = self.ESCAPABLE.get(delim, None)
171 return escapables is None or c in escapables
172
173 def _parse_squote(self, buf, result, eof):
174 if not buf:
175 raise NeedMore()
176 try:
177 pos = buf.index("'")
178 except ValueError:
179 raise NeedMore()
180 result[-1] += ''.join(buf[:pos])
181 result += ["'"]
182 return pos+1, True
183
184 def _parse_bquote(self, buf, result, eof):
185 if not buf:
186 raise NeedMore()
187
188 if buf[0]=='\n':
189 #Remove line continuations
190 result[:] = ['', '', '']
191 elif self._is_escapable(buf[0]):
192 result[-1] += buf[0]
193 result += ['']
194 else:
195 #Keep as such
196 result[:] = ['', '\\'+buf[0], '']
197
198 return 1, True
199
200 def _parse_dquote(self, buf, result, eof):
201 if not buf:
202 raise NeedMore()
203 pos, sep = find_chars(buf, '$\\`"')
204 if pos==-1:
205 raise NeedMore()
206
207 result[-1] += ''.join(buf[:pos])
208 if sep=='"':
209 result += ['"']
210 return pos+1, True
211 else:
212 #Keep everything until the separator and defer processing
213 return pos, False
214
215 def _parse_command(self, buf, result, eof):
216 if not buf:
217 raise NeedMore()
218
219 chars = '$\\`"\''
220 if result[0] == '$(':
221 chars += ')'
222 pos, sep = find_chars(buf, chars)
223 if pos == -1:
224 raise NeedMore()
225
226 result[-1] += ''.join(buf[:pos])
227 if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'):
228 result += [sep]
229 return pos+1, True
230 else:
231 return pos, False
232
233 def _parse_parameter(self, buf, result, eof):
234 if not buf:
235 raise NeedMore()
236
237 pos, sep = find_chars(buf, '$\\`"\'}')
238 if pos==-1:
239 raise NeedMore()
240
241 result[-1] += ''.join(buf[:pos])
242 if sep=='}':
243 result += [sep]
244 return pos+1, True
245 else:
246 return pos, False
247
248 def _parse_dollar(self, buf, result, eof):
249 sep = result[0]
250 if sep=='$':
251 if not buf:
252 #TODO: handle empty $
253 raise NeedMore()
254 if buf[0]=='(':
255 if len(buf)==1:
256 raise NeedMore()
257
258 if buf[1]=='(':
259 result[0] = '$(('
260 buf[:2] = []
261 else:
262 result[0] = '$('
263 buf[:1] = []
264
265 elif buf[0]=='{':
266 result[0] = '${'
267 buf[:1] = []
268 else:
269 if buf[0] in self.SPECIAL_CHARSET:
270 result[-1] = buf[0]
271 read = 1
272 else:
273 for read,c in enumerate(buf):
274 if c not in self.NAME_CHARSET:
275 break
276 else:
277 if not eof:
278 raise NeedMore()
279 read += 1
280
281 result[-1] += ''.join(buf[0:read])
282
283 if not result[-1]:
284 result[:] = ['', result[0], '']
285 else:
286 result += ['']
287 return read,True
288
289 sep = result[0]
290 if sep=='$(':
291 parsefunc = self._parse_command
292 elif sep=='${':
293 parsefunc = self._parse_parameter
294 else:
295 raise NotImplementedError(sep)
296
297 pos, closed = parsefunc(buf, result, eof)
298 return pos, closed
299
300 def _parse(self, eof):
301 buf = self._buffer
302 stack = self._stack
303 recurse = False
304
305 while 1:
306 if not stack or recurse:
307 if not buf:
308 raise NeedMore()
309 if buf[0] not in ('"\\`$\''):
310 raise ShellSyntaxError('Invalid quoted string sequence')
311 stack.append([buf[0], ''])
312 buf[:1] = []
313 recurse = False
314
315 result = stack[-1]
316 if result[0]=="'":
317 parsefunc = self._parse_squote
318 elif result[0]=='\\':
319 parsefunc = self._parse_bquote
320 elif result[0]=='"':
321 parsefunc = self._parse_dquote
322 elif result[0]=='`':
323 parsefunc = self._parse_command
324 elif result[0][0]=='$':
325 parsefunc = self._parse_dollar
326 else:
327 raise NotImplementedError()
328
329 read, closed = parsefunc(buf, result, eof)
330
331 buf[:read] = []
332 if closed:
333 if len(stack)>1:
334 #Merge in parent expression
335 parsed = stack.pop()
336 stack[-1] += [parsed]
337 stack[-1] += ['']
338 else:
339 break
340 else:
341 recurse = True
342
343def normalize_wordtree(wtree):
344 """Fold back every literal sequence (delimited with empty strings) into
345 parent sequence.
346 """
347 def normalize(wtree):
348 result = []
349 for part in wtree[1:-1]:
350 if isinstance(part, list):
351 part = normalize(part)
352 if part[0]=='':
353 #Move the part content back at current level
354 result += part[1:-1]
355 continue
356 elif not part:
357 #Remove empty strings
358 continue
359 result.append(part)
360 if not result:
361 result = ['']
362 return [wtree[0]] + result + [wtree[-1]]
363
364 return normalize(wtree)
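# For example, a literal sub-sequence is folded back into its parent:
#   normalize_wordtree(['', 'a', ['', 'b', ''], 'c', ''])
#   # -> ['', 'a', 'b', 'c', '']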
365
366
367def make_wordtree(token, here_document=False):
368 """Parse a delimited token and return a tree similar to the ones returned by
369 WordLexer. token may contain any combination of expansion/quoted fields and
370 plain ones.
371 """
372 tree = ['']
373 remaining = token
374 delimiters = '\\$`'
375 if not here_document:
376 delimiters += '\'"'
377
378 while 1:
379 pos, sep = find_chars(remaining, delimiters)
380 if pos==-1:
381 tree += [remaining, '']
382 return normalize_wordtree(tree)
383 tree.append(remaining[:pos])
384 remaining = remaining[pos:]
385
386 try:
387 result, remaining = WordLexer(heredoc = here_document).add(remaining, True)
388 except NeedMore:
389 raise ShellSyntaxError('Invalid token "%s"' % token)
390 tree.append(result)
391
392
393def wordtree_as_string(wtree):
394 """Rewrite an expression tree generated by make_wordtree as string."""
395 def visit(node, output):
396 for child in node:
397 if isinstance(child, list):
398 visit(child, output)
399 else:
400 output.append(child)
401
402 output = []
403 visit(wtree, output)
404 return ''.join(output)
405
406
407def unquote_wordtree(wtree):
408 """Fold the word tree while removing quotes everywhere. Other expansion
409 sequences are joined as such.
410 """
411 def unquote(wtree):
412 unquoted = []
413 if wtree[0] in ('', "'", '"', '\\'):
414 wtree = wtree[1:-1]
415
416 for part in wtree:
417 if isinstance(part, list):
418 part = unquote(part)
419 unquoted.append(part)
420 return ''.join(unquoted)
421
422 return unquote(wtree)
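# Putting the helpers above together on a small token gives, roughly:
#   t = make_wordtree('a"b"c')   # -> ['', 'a', ['"', 'b', '"'], 'c', '']
#   wordtree_as_string(t)        # -> 'a"b"c'
#   unquote_wordtree(t)          # -> 'abc'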
423
424
425class HereDocLexer:
426 """HereDocLexer delimits the here-document content, from the starting newline
427 (not included) up to the closing delimiter line (included).
428 """
429 def __init__(self, op, delim):
430 assert op in ('<<', '<<-')
431 if not delim:
432 raise ShellSyntaxError('invalid here document delimiter %s' % str(delim))
433
434 self._op = op
435 self._delim = delim
436 self._buffer = []
437 self._token = []
438
439 def add(self, data, eof):
440 """If the here-document was delimited, return a tuple (content, remaining).
441 Raise NeedMore() otherwise.
442 """
443 self._buffer += list(data)
444 self._parse(eof)
445 token = ''.join(self._token)
446 remaining = ''.join(self._buffer)
447 self._token, self._remaining = [], []
448 return token, remaining
449
450 def _parse(self, eof):
451 while 1:
452 #Look for first unescaped newline. Quotes may be ignored
453 escaped = False
454 for i,c in enumerate(self._buffer):
455 if escaped:
456 escaped = False
457 elif c=='\\':
458 escaped = True
459 elif c=='\n':
460 break
461 else:
462 i = -1
463
464 if i==-1 or self._buffer[i]!='\n':
465 if not eof:
466 raise NeedMore()
467 #No more data, maybe the last line is closing delimiter
468 line = ''.join(self._buffer)
469 eol = ''
470 self._buffer[:] = []
471 else:
472 line = ''.join(self._buffer[:i])
473 eol = self._buffer[i]
474 self._buffer[:i+1] = []
475
476 if self._op=='<<-':
477 line = line.lstrip('\t')
478
479 if line==self._delim:
480 break
481
482 self._token += [line, eol]
483 if i==-1:
484 break
485
486class Token:
487 #TODO: check this is still in use
488 OPERATOR = 'OPERATOR'
489 WORD = 'WORD'
490
491 def __init__(self):
492 self.value = ''
493 self.type = None
494
495 def __getitem__(self, key):
496 #Behave like a two elements tuple
497 if key==0:
498 return self.type
499 if key==1:
500 return self.value
501 raise IndexError(key)
502
503
504class HereDoc:
505 def __init__(self, op, name=None):
506 self.op = op
507 self.name = name
508 self.pendings = []
509
510TK_COMMA = 'COMMA'
511TK_AMPERSAND = 'AMP'
512TK_OP = 'OP'
513TK_TOKEN = 'TOKEN'
514TK_COMMENT = 'COMMENT'
515TK_NEWLINE = 'NEWLINE'
516TK_IONUMBER = 'IO_NUMBER'
517TK_ASSIGNMENT = 'ASSIGNMENT_WORD'
518TK_HERENAME = 'HERENAME'
519
520class Lexer:
521 """Main lexer.
522
523 Call add() to feed the lexer; tokens are delivered through on_token().
524 """
525 # Here-document handling makes the whole thing more complex because it basically
526 # forces tokens to be reordered: here-content must come right after the operator
527 # and the here-document name, while some other tokens might be following the
528 # here-document expression on the same line.
529 #
530 # So, here-doc states are basically:
531 # *self._state==ST_NORMAL
532 # - self._heredoc.op is None: no here-document
533 # - self._heredoc.op is not None but name is: here-document operator matched,
534 # waiting for the document name/delimiter
535 # - self._heredoc.op and name are not None: here-document is ready, following
536 # tokens are being stored and will be pushed again when the document is
537 # completely parsed.
538 # *self._state==ST_HEREDOC
539 # - The here-document is being delimited by self._herelexer. Once it is done
540 # the content is pushed in front of the pending token list then all these
541 # tokens are pushed once again.
542 ST_NORMAL = 'ST_NORMAL'
543 ST_OP = 'ST_OP'
544 ST_BACKSLASH = 'ST_BACKSLASH'
545 ST_QUOTED = 'ST_QUOTED'
546 ST_COMMENT = 'ST_COMMENT'
547 ST_HEREDOC = 'ST_HEREDOC'
548
549 #Match end of backquote strings
550 RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)')
551
552 def __init__(self, parent_state = None):
553 self._input = []
554 self._pos = 0
555
556 self._token = ''
557 self._type = TK_TOKEN
558
559 self._state = self.ST_NORMAL
560 self._parent_state = parent_state
561 self._wordlexer = None
562
563 self._heredoc = HereDoc(None)
564 self._herelexer = None
565
566 ### Following attributes are not used for delimiting tokens and can safely
567 ### be changed after here-document detection (see _push_token)
568
569 # Count the number of tokens following a 'For' reserved word. Needed to
570 # return an 'In' reserved word if it comes in third place.
571 self._for_count = None
572
573 def add(self, data, eof=False):
574 """Feed the lexer with data.
575
576 When eof is set to True, return unconsumed data or raise if the lexer
577 is in the middle of a delimiting operation.
578 Raise NeedMore otherwise.
579 """
580 self._input += list(data)
581 self._parse(eof)
582 self._input[:self._pos] = []
583 return ''.join(self._input)
584
585 def _parse(self, eof):
586 while self._state:
587 if self._pos>=len(self._input):
588 if not eof:
589 raise NeedMore()
590 elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC):
591 #Delimit the current token and leave cleanly
592 self._push_token('')
593 break
594 else:
595 #Let the sublexer handle the eof itself
596 pass
597
598 if self._state==self.ST_NORMAL:
599 self._parse_normal()
600 elif self._state==self.ST_COMMENT:
601 self._parse_comment()
602 elif self._state==self.ST_OP:
603 self._parse_op(eof)
604 elif self._state==self.ST_QUOTED:
605 self._parse_quoted(eof)
606 elif self._state==self.ST_HEREDOC:
607 self._parse_heredoc(eof)
608 else:
609 assert False, "Unknown state " + str(self._state)
610
611 if self._heredoc.op is not None:
612 raise ShellSyntaxError('missing here-document delimiter')
613
614 def _parse_normal(self):
615 c = self._input[self._pos]
616 if c=='\n':
617 self._push_token(c)
618 self._token = c
619 self._type = TK_NEWLINE
620 self._push_token('')
621 self._pos += 1
622 elif c in ('\\', '\'', '"', '`', '$'):
623 self._state = self.ST_QUOTED
624 elif is_partial_op(c):
625 self._push_token(c)
626
627 self._type = TK_OP
628 self._token += c
629 self._pos += 1
630 self._state = self.ST_OP
631 elif is_blank(c):
632 self._push_token(c)
633
634 #Discard blanks
635 self._pos += 1
636 elif self._token:
637 self._token += c
638 self._pos += 1
639 elif c=='#':
640 self._state = self.ST_COMMENT
641 self._type = TK_COMMENT
642 self._pos += 1
643 else:
644 self._pos += 1
645 self._token += c
646
647 def _parse_op(self, eof):
648 assert self._token
649
650 while 1:
651 if self._pos>=len(self._input):
652 if not eof:
653 raise NeedMore()
654 c = ''
655 else:
656 c = self._input[self._pos]
657
658 op = self._token + c
659 if c and is_partial_op(op):
660 #Still parsing an operator
661 self._token = op
662 self._pos += 1
663 else:
664 #End of operator
665 self._push_token(c)
666 self._state = self.ST_NORMAL
667 break
668
669 def _parse_comment(self):
670 while 1:
671 if self._pos>=len(self._input):
672 raise NeedMore()
673
674 c = self._input[self._pos]
675 if c=='\n':
676 #End of comment, do not consume the end of line
677 self._state = self.ST_NORMAL
678 break
679 else:
680 self._token += c
681 self._pos += 1
682
683 def _parse_quoted(self, eof):
684 """Precondition: the starting backquote/dollar is still in the input queue."""
685 if not self._wordlexer:
686 self._wordlexer = WordLexer()
687
688 if self._pos<len(self._input):
689 #Transfer input queue characters into the subparser
690 input = self._input[self._pos:]
691 self._pos += len(input)
692
693 wtree, remaining = self._wordlexer.add(input, eof)
694 self._wordlexer = None
695 self._token += wordtree_as_string(wtree)
696
697 #Put unparsed character back in the input queue
698 if remaining:
699 self._input[self._pos:self._pos] = list(remaining)
700 self._state = self.ST_NORMAL
701
702 def _parse_heredoc(self, eof):
703 assert not self._token
704
705 if self._herelexer is None:
706 self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name)
707
708 if self._pos<len(self._input):
709 #Transfer input queue characters into the subparser
710 input = self._input[self._pos:]
711 self._pos += len(input)
712
713 self._token, remaining = self._herelexer.add(input, eof)
714
715 #Reset here-document state
716 self._herelexer = None
717 heredoc, self._heredoc = self._heredoc, HereDoc(None)
718 if remaining:
719 self._input[self._pos:self._pos] = list(remaining)
720 self._state = self.ST_NORMAL
721
722 #Push pending tokens
723 heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)]
724 for token, type, delim in heredoc.pendings:
725 self._token = token
726 self._type = type
727 self._push_token(delim)
728
729 def _push_token(self, delim):
730 if not self._token:
731 return 0
732
733 if self._heredoc.op is not None:
734 if self._heredoc.name is None:
735 #Here-document name
736 if self._type!=TK_TOKEN:
737 raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token)
738 self._heredoc.name = unquote_wordtree(make_wordtree(self._token))
739 self._type = TK_HERENAME
740 else:
741 #Capture all tokens until the newline starting the here-document
742 if self._type==TK_NEWLINE:
743 assert self._state==self.ST_NORMAL
744 self._state = self.ST_HEREDOC
745
746 self._heredoc.pendings.append((self._token, self._type, delim))
747 self._token = ''
748 self._type = TK_TOKEN
749 return 1
750
751 # BEWARE: do not change parser state from here to the end of the function:
752 # when parsing between a here-document operator and the end of the line,
753 # tokens are stored in self._heredoc.pendings. Therefore, they will not
754 # reach the section below.
755
756 #Check operators
757 if self._type==TK_OP:
758 #False positive because of partial op matching
759 op = is_op(self._token)
760 if not op:
761 self._type = TK_TOKEN
762 else:
763 #Map to the specific operator
764 self._type = op
765 if self._token in ('<<', '<<-'):
766 #Done here rather than in _parse_op because there is no need
767 #to change the parser state since we are still waiting for
768 #the here-document name
769 if self._heredoc.op is not None:
770 raise ShellSyntaxError("syntax error near token '%s'" % self._token)
771 assert self._heredoc.op is None
772 self._heredoc.op = self._token
773
774 if self._type==TK_TOKEN:
775 if '=' in self._token and not delim:
776 if self._token.startswith('='):
777 #Token is a WORD... a TOKEN that is.
778 pass
779 else:
780 prev = self._token[:self._token.find('=')]
781 if is_name(prev):
782 self._type = TK_ASSIGNMENT
783 else:
784 #Just a token (unspecified)
785 pass
786 else:
787 reserved = get_reserved(self._token)
788 if reserved is not None:
789 if reserved=='In' and self._for_count!=2:
790 #Sorry, not a reserved word after all
791 pass
792 else:
793 self._type = reserved
794 if reserved in ('For', 'Case'):
795 self._for_count = 0
796 elif are_digits(self._token) and delim in ('<', '>'):
797 #Detect IO_NUMBER
798 self._type = TK_IONUMBER
799 elif self._token==';':
800 self._type = TK_COMMA
801 elif self._token=='&':
802 self._type = TK_AMPERSAND
803 elif self._type==TK_COMMENT:
804 #Comments are not part of sh grammar, ignore them
805 self._token = ''
806 self._type = TK_TOKEN
807 return 0
808
809 if self._for_count is not None:
810 #Track token count in 'For' expression to detect 'In' reserved words.
811 #Can only be in third position, no need to go beyond
812 self._for_count += 1
813 if self._for_count==3:
814 self._for_count = None
815
816 self.on_token((self._token, self._type))
817 self._token = ''
818 self._type = TK_TOKEN
819 return 1
820
821 def on_token(self, token):
822 raise NotImplementedError
823
824
825tokens = [
826 TK_TOKEN,
827# To silence yacc unused token warnings
828# TK_COMMENT,
829 TK_NEWLINE,
830 TK_IONUMBER,
831 TK_ASSIGNMENT,
832 TK_HERENAME,
833]
834
835#Add specific operators
836tokens += _OPERATORS.values()
837#Add reserved words
838tokens += _RESERVEDS.values()
839
840class PLYLexer(Lexer):
841 """Bridge Lexer and PLY lexer interface."""
842 def __init__(self):
843 Lexer.__init__(self)
844 self._tokens = []
845 self._current = 0
846 self.lineno = 0
847
848 def on_token(self, token):
849 value, type = token
850
851 self.lineno = 0
852 t = lex.LexToken()
853 t.value = value
854 t.type = type
855 t.lexer = self
856 t.lexpos = 0
857 t.lineno = 0
858
859 self._tokens.append(t)
860
861 def is_empty(self):
862 return not bool(self._tokens)
863
864 #PLY compliant interface
865 def token(self):
866 if self._current>=len(self._tokens):
867 return None
868 t = self._tokens[self._current]
869 self._current += 1
870 return t
871
872
873def get_tokens(s):
874 """Parse the input string and return a tuple (tokens, unprocessed) where
875 tokens is a list of parsed tokens and unprocessed is the part of the input
876 string left untouched by the lexer.
877 """
878 lexer = PLYLexer()
879 untouched = lexer.add(s, True)
880 tokens = []
881 while 1:
882 token = lexer.token()
883 if token is None:
884 break
885 tokens.append(token)
886
887 tokens = [(t.value, t.type) for t in tokens]
888 return tokens, untouched
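# A small example of the expected output, roughly:
#   get_tokens('echo foo\n')
#   # -> ([('echo', 'TOKEN'), ('foo', 'TOKEN'), ('\n', 'NEWLINE')], '')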
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
new file mode 100644
index 0000000000..e8e80aac45
--- /dev/null
+++ b/bitbake/lib/bb/pysh/pyshyacc.py
@@ -0,0 +1,779 @@
1# pyshyacc.py - PLY grammar definition for pysh
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""PLY grammar file.
9"""
10import os.path
11import sys
12
13import pyshlex
14tokens = pyshlex.tokens
15
16from ply import yacc
17import sherrors
18
19class IORedirect:
20 def __init__(self, op, filename, io_number=None):
21 self.op = op
22 self.filename = filename
23 self.io_number = io_number
24
25class HereDocument:
26 def __init__(self, op, name, content, io_number=None):
27 self.op = op
28 self.name = name
29 self.content = content
30 self.io_number = io_number
31
32def make_io_redirect(p):
33 """Make an IORedirect instance from the input 'io_redirect' production."""
34 name, io_number, io_target = p
35 assert name=='io_redirect'
36
37 if io_target[0]=='io_file':
38 io_type, io_op, io_file = io_target
39 return IORedirect(io_op, io_file, io_number)
40 elif io_target[0]=='io_here':
41 io_type, io_op, io_name, io_content = io_target
42 return HereDocument(io_op, io_name, io_content, io_number)
43 else:
44 assert False, "Invalid IO redirection token %s" % repr(io_type)
45
46class SimpleCommand:
47 """
48 assigns contains (name, value) pairs.
49 """
50 def __init__(self, words, redirs, assigns):
51 self.words = list(words)
52 self.redirs = list(redirs)
53 self.assigns = list(assigns)
54
55class Pipeline:
56 def __init__(self, commands, reverse_status=False):
57 self.commands = list(commands)
58 assert self.commands #Grammar forbids this
59 self.reverse_status = reverse_status
60
61class AndOr:
62 def __init__(self, op, left, right):
63 self.op = str(op)
64 self.left = left
65 self.right = right
66
67class ForLoop:
68 def __init__(self, name, items, cmds):
69 self.name = str(name)
70 self.items = list(items)
71 self.cmds = list(cmds)
72
73class WhileLoop:
74 def __init__(self, condition, cmds):
75 self.condition = list(condition)
76 self.cmds = list(cmds)
77
78class UntilLoop:
79 def __init__(self, condition, cmds):
80 self.condition = list(condition)
81 self.cmds = list(cmds)
82
83class FunDef:
84 def __init__(self, name, body):
85 self.name = str(name)
86 self.body = body
87
88class BraceGroup:
89 def __init__(self, cmds):
90 self.cmds = list(cmds)
91
92class IfCond:
93 def __init__(self, cond, if_cmds, else_cmds):
94 self.cond = list(cond)
95 self.if_cmds = if_cmds
96 self.else_cmds = else_cmds
97
98class Case:
99 def __init__(self, name, items):
100 self.name = name
101 self.items = items
102
103class SubShell:
104 def __init__(self, cmds):
105 self.cmds = cmds
106
107class RedirectList:
108 def __init__(self, cmd, redirs):
109 self.cmd = cmd
110 self.redirs = list(redirs)
111
112def get_production(productions, ptype):
113 """productions must be a list of production tuples like (name, obj) where
114 name is the production string identifier.
115    Return the first production named 'ptype'. Raise KeyError if none can be
116 found.
117 """
118 for production in productions:
119 if production is not None and production[0]==ptype:
120 return production
121 raise KeyError(ptype)
122
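# A small standalone sketch of how the grammar actions below use this helper
# (the production tuples here are made up for illustration):
productions = [('newline_list', None), ('term', ('and_or', 'cmd'))]
assert get_production(productions, 'term') == ('term', ('and_or', 'cmd'))
try:
    get_production(productions, 'separator')
except KeyError:
    pass    # no 'separator' production in this list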
123#-------------------------------------------------------------------------------
124# PLY grammar definition
125#-------------------------------------------------------------------------------
126
127def p_multiple_commands(p):
128 """multiple_commands : newline_sequence
129 | complete_command
130 | multiple_commands complete_command"""
131 if len(p)==2:
132 if p[1] is not None:
133 p[0] = [p[1]]
134 else:
135 p[0] = []
136 else:
137 p[0] = p[1] + [p[2]]
138
139def p_complete_command(p):
140 """complete_command : list separator
141 | list"""
142 if len(p)==3 and p[2] and p[2][1] == '&':
143 p[0] = ('async', p[1])
144 else:
145 p[0] = p[1]
146
147def p_list(p):
148 """list : list separator_op and_or
149 | and_or"""
150 if len(p)==2:
151 p[0] = [p[1]]
152 else:
153 #if p[2]!=';':
154 # raise NotImplementedError('AND-OR list asynchronous execution is not implemented')
155 p[0] = p[1] + [p[3]]
156
157def p_and_or(p):
158 """and_or : pipeline
159 | and_or AND_IF linebreak pipeline
160 | and_or OR_IF linebreak pipeline"""
161 if len(p)==2:
162 p[0] = p[1]
163 else:
164 p[0] = ('and_or', AndOr(p[2], p[1], p[4]))
165
166def p_maybe_bang_word(p):
167 """maybe_bang_word : Bang"""
168 p[0] = ('maybe_bang_word', p[1])
169
170def p_pipeline(p):
171 """pipeline : pipe_sequence
172 | bang_word pipe_sequence"""
173 if len(p)==3:
174 p[0] = ('pipeline', Pipeline(p[2][1:], True))
175 else:
176 p[0] = ('pipeline', Pipeline(p[1][1:]))
177
178def p_pipe_sequence(p):
179 """pipe_sequence : command
180 | pipe_sequence PIPE linebreak command"""
181 if len(p)==2:
182 p[0] = ['pipe_sequence', p[1]]
183 else:
184 p[0] = p[1] + [p[4]]
185
186def p_command(p):
187 """command : simple_command
188 | compound_command
189 | compound_command redirect_list
190 | function_definition"""
191
192 if p[1][0] in ( 'simple_command',
193 'for_clause',
194 'while_clause',
195 'until_clause',
196 'case_clause',
197 'if_clause',
198 'function_definition',
199 'subshell',
200 'brace_group',):
201 if len(p) == 2:
202 p[0] = p[1]
203 else:
204 p[0] = ('redirect_list', RedirectList(p[1], p[2][1:]))
205 else:
206 raise NotImplementedError('%s command is not implemented' % repr(p[1][0]))
207
208def p_compound_command(p):
209 """compound_command : brace_group
210 | subshell
211 | for_clause
212 | case_clause
213 | if_clause
214 | while_clause
215 | until_clause"""
216 p[0] = p[1]
217
218def p_subshell(p):
219 """subshell : LPARENS compound_list RPARENS"""
220 p[0] = ('subshell', SubShell(p[2][1:]))
221
222def p_compound_list(p):
223 """compound_list : term
224 | newline_list term
225 | term separator
226 | newline_list term separator"""
227 productions = p[1:]
228 try:
229 sep = get_production(productions, 'separator')
230 if sep[1]!=';':
231 raise NotImplementedError()
232 except KeyError:
233 pass
234 term = get_production(productions, 'term')
235 p[0] = ['compound_list'] + term[1:]
236
237def p_term(p):
238 """term : term separator and_or
239 | and_or"""
240 if len(p)==2:
241 p[0] = ['term', p[1]]
242 else:
243 if p[2] is not None and p[2][1] == '&':
244 p[0] = ['term', ('async', p[1][1:])] + [p[3]]
245 else:
246 p[0] = p[1] + [p[3]]
247
248def p_maybe_for_word(p):
249 # Rearrange 'For' priority wrt TOKEN. See p_for_word
250 """maybe_for_word : For"""
251 p[0] = ('maybe_for_word', p[1])
252
253def p_for_clause(p):
254 """for_clause : for_word name linebreak do_group
255 | for_word name linebreak in sequential_sep do_group
256 | for_word name linebreak in wordlist sequential_sep do_group"""
257 productions = p[1:]
258 do_group = get_production(productions, 'do_group')
259 try:
260 items = get_production(productions, 'in')[1:]
261 except KeyError:
262 raise NotImplementedError('"in" omission is not implemented')
263
264 try:
265 items = get_production(productions, 'wordlist')[1:]
266 except KeyError:
267 items = []
268
269 name = p[2]
270 p[0] = ('for_clause', ForLoop(name, items, do_group[1:]))
271
272def p_name(p):
273 """name : token""" #Was NAME instead of token
274 p[0] = p[1]
275
276def p_in(p):
277 """in : In"""
278 p[0] = ('in', p[1])
279
280def p_wordlist(p):
281 """wordlist : wordlist token
282 | token"""
283 if len(p)==2:
284 p[0] = ['wordlist', ('TOKEN', p[1])]
285 else:
286 p[0] = p[1] + [('TOKEN', p[2])]
287
288def p_case_clause(p):
289 """case_clause : Case token linebreak in linebreak case_list Esac
290 | Case token linebreak in linebreak case_list_ns Esac
291 | Case token linebreak in linebreak Esac"""
292 if len(p) < 8:
293 items = []
294 else:
295 items = p[6][1:]
296 name = p[2]
297 p[0] = ('case_clause', Case(name, [c[1] for c in items]))
298
299def p_case_list_ns(p):
300 """case_list_ns : case_list case_item_ns
301 | case_item_ns"""
302 p_case_list(p)
303
304def p_case_list(p):
305 """case_list : case_list case_item
306 | case_item"""
307 if len(p)==2:
308 p[0] = ['case_list', p[1]]
309 else:
310 p[0] = p[1] + [p[2]]
311
312def p_case_item_ns(p):
313 """case_item_ns : pattern RPARENS linebreak
314 | pattern RPARENS compound_list linebreak
315 | LPARENS pattern RPARENS linebreak
316 | LPARENS pattern RPARENS compound_list linebreak"""
317 p_case_item(p)
318
319def p_case_item(p):
320 """case_item : pattern RPARENS linebreak DSEMI linebreak
321 | pattern RPARENS compound_list DSEMI linebreak
322 | LPARENS pattern RPARENS linebreak DSEMI linebreak
323 | LPARENS pattern RPARENS compound_list DSEMI linebreak"""
324 if len(p) < 7:
325 name = p[1][1:]
326 else:
327 name = p[2][1:]
328
329 try:
330 cmds = get_production(p[1:], "compound_list")[1:]
331 except KeyError:
332 cmds = []
333
334 p[0] = ('case_item', (name, cmds))
335
336def p_pattern(p):
337 """pattern : token
338 | pattern PIPE token"""
339 if len(p)==2:
340 p[0] = ['pattern', ('TOKEN', p[1])]
341 else:
342 p[0] = p[1] + [('TOKEN', p[2])]
343
344def p_maybe_if_word(p):
345 # Rearrange 'If' priority wrt TOKEN. See p_if_word
346 """maybe_if_word : If"""
347 p[0] = ('maybe_if_word', p[1])
348
349def p_maybe_then_word(p):
350 # Rearrange 'Then' priority wrt TOKEN. See p_then_word
351 """maybe_then_word : Then"""
352 p[0] = ('maybe_then_word', p[1])
353
354def p_if_clause(p):
355 """if_clause : if_word compound_list then_word compound_list else_part Fi
356 | if_word compound_list then_word compound_list Fi"""
357 else_part = []
358 if len(p)==7:
359 else_part = p[5]
360 p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part))
361
362def p_else_part(p):
363 """else_part : Elif compound_list then_word compound_list else_part
364 | Elif compound_list then_word compound_list
365 | Else compound_list"""
366 if len(p)==3:
367 p[0] = p[2][1:]
368 else:
369 else_part = []
370 if len(p)==6:
371 else_part = p[5]
372 p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part))
373
374def p_while_clause(p):
375 """while_clause : While compound_list do_group"""
376 p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:]))
377
378def p_maybe_until_word(p):
379 # Rearrange 'Until' priority wrt TOKEN. See p_until_word
380 """maybe_until_word : Until"""
381 p[0] = ('maybe_until_word', p[1])
382
383def p_until_clause(p):
384 """until_clause : until_word compound_list do_group"""
385 p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:]))
386
387def p_function_definition(p):
388 """function_definition : fname LPARENS RPARENS linebreak function_body"""
389 p[0] = ('function_definition', FunDef(p[1], p[5]))
390
391def p_function_body(p):
392 """function_body : compound_command
393 | compound_command redirect_list"""
394 if len(p)!=2:
395        raise NotImplementedError('function redirection lists are not implemented')
396 p[0] = p[1]
397
398def p_fname(p):
399 """fname : TOKEN""" #Was NAME instead of token
400 p[0] = p[1]
401
402def p_brace_group(p):
403 """brace_group : Lbrace compound_list Rbrace"""
404 p[0] = ('brace_group', BraceGroup(p[2][1:]))
405
406def p_maybe_done_word(p):
407 #See p_assignment_word for details.
408 """maybe_done_word : Done"""
409 p[0] = ('maybe_done_word', p[1])
410
411def p_maybe_do_word(p):
412 """maybe_do_word : Do"""
413 p[0] = ('maybe_do_word', p[1])
414
415def p_do_group(p):
416 """do_group : do_word compound_list done_word"""
417 #Do group contains a list of AndOr
418 p[0] = ['do_group'] + p[2][1:]
419
420def p_simple_command(p):
421 """simple_command : cmd_prefix cmd_word cmd_suffix
422 | cmd_prefix cmd_word
423 | cmd_prefix
424 | cmd_name cmd_suffix
425 | cmd_name"""
426 words, redirs, assigns = [], [], []
427 for e in p[1:]:
428 name = e[0]
429 if name in ('cmd_prefix', 'cmd_suffix'):
430 for sube in e[1:]:
431 subname = sube[0]
432 if subname=='io_redirect':
433 redirs.append(make_io_redirect(sube))
434 elif subname=='ASSIGNMENT_WORD':
435 assigns.append(sube)
436 else:
437 words.append(sube)
438 elif name in ('cmd_word', 'cmd_name'):
439 words.append(e)
440
441 cmd = SimpleCommand(words, redirs, assigns)
442 p[0] = ('simple_command', cmd)
443
444def p_cmd_name(p):
445 """cmd_name : TOKEN"""
446 p[0] = ('cmd_name', p[1])
447
448def p_cmd_word(p):
449 """cmd_word : token"""
450 p[0] = ('cmd_word', p[1])
451
452def p_maybe_assignment_word(p):
453 #See p_assignment_word for details.
454 """maybe_assignment_word : ASSIGNMENT_WORD"""
455 p[0] = ('maybe_assignment_word', p[1])
456
457def p_cmd_prefix(p):
458 """cmd_prefix : io_redirect
459 | cmd_prefix io_redirect
460 | assignment_word
461 | cmd_prefix assignment_word"""
462 try:
463 prefix = get_production(p[1:], 'cmd_prefix')
464 except KeyError:
465 prefix = ['cmd_prefix']
466
467 try:
468 value = get_production(p[1:], 'assignment_word')[1]
469 value = ('ASSIGNMENT_WORD', value.split('=', 1))
470 except KeyError:
471 value = get_production(p[1:], 'io_redirect')
472 p[0] = prefix + [value]
473
474def p_cmd_suffix(p):
475 """cmd_suffix : io_redirect
476 | cmd_suffix io_redirect
477 | token
478 | cmd_suffix token
479 | maybe_for_word
480 | cmd_suffix maybe_for_word
481 | maybe_done_word
482 | cmd_suffix maybe_done_word
483 | maybe_do_word
484 | cmd_suffix maybe_do_word
485 | maybe_until_word
486 | cmd_suffix maybe_until_word
487 | maybe_assignment_word
488 | cmd_suffix maybe_assignment_word
489 | maybe_if_word
490 | cmd_suffix maybe_if_word
491 | maybe_then_word
492 | cmd_suffix maybe_then_word
493 | maybe_bang_word
494 | cmd_suffix maybe_bang_word"""
495 try:
496 suffix = get_production(p[1:], 'cmd_suffix')
497 token = p[2]
498 except KeyError:
499 suffix = ['cmd_suffix']
500 token = p[1]
501
502 if isinstance(token, tuple):
503 if token[0]=='io_redirect':
504 p[0] = suffix + [token]
505 else:
506 #Convert maybe_* to TOKEN if necessary
507 p[0] = suffix + [('TOKEN', token[1])]
508 else:
509 p[0] = suffix + [('TOKEN', token)]
510
511def p_redirect_list(p):
512 """redirect_list : io_redirect
513 | redirect_list io_redirect"""
514 if len(p) == 2:
515 p[0] = ['redirect_list', make_io_redirect(p[1])]
516 else:
517 p[0] = p[1] + [make_io_redirect(p[2])]
518
519def p_io_redirect(p):
520 """io_redirect : io_file
521 | IO_NUMBER io_file
522 | io_here
523 | IO_NUMBER io_here"""
524 if len(p)==3:
525 p[0] = ('io_redirect', p[1], p[2])
526 else:
527 p[0] = ('io_redirect', None, p[1])
528
529def p_io_file(p):
530 #Return the tuple (operator, filename)
531 """io_file : LESS filename
532 | LESSAND filename
533 | GREATER filename
534 | GREATAND filename
535 | DGREAT filename
536 | LESSGREAT filename
537 | CLOBBER filename"""
538    #Extract the filename from the filename production
539 p[0] = ('io_file', p[1], p[2][1])
540
541def p_filename(p):
542 #Return the filename
543 """filename : TOKEN"""
544 p[0] = ('filename', p[1])
545
546def p_io_here(p):
547 """io_here : DLESS here_end
548 | DLESSDASH here_end"""
549 p[0] = ('io_here', p[1], p[2][1], p[2][2])
550
551def p_here_end(p):
552 """here_end : HERENAME TOKEN"""
553 p[0] = ('here_document', p[1], p[2])
554
555def p_newline_sequence(p):
556 # Nothing in the grammar can handle leading NEWLINE productions, so add
557    # this one with the lowest possible priority relative to newline_list.
558 """newline_sequence : newline_list"""
559 p[0] = None
560
561def p_newline_list(p):
562 """newline_list : NEWLINE
563 | newline_list NEWLINE"""
564 p[0] = None
565
566def p_linebreak(p):
567 """linebreak : newline_list
568 | empty"""
569 p[0] = None
570
571def p_separator_op(p):
572 """separator_op : COMMA
573 | AMP"""
574 p[0] = p[1]
575
576def p_separator(p):
577 """separator : separator_op linebreak
578 | newline_list"""
579 if len(p)==2:
580 #Ignore newlines
581 p[0] = None
582 else:
583 #Keep the separator operator
584 p[0] = ('separator', p[1])
585
586def p_sequential_sep(p):
587 """sequential_sep : COMMA linebreak
588 | newline_list"""
589 p[0] = None
590
591# Low priority TOKEN => for_word conversion.
592# Let maybe_for_word be used as a token when necessary in higher priority
593# rules.
594def p_for_word(p):
595 """for_word : maybe_for_word"""
596 p[0] = p[1]
597
598def p_if_word(p):
599 """if_word : maybe_if_word"""
600 p[0] = p[1]
601
602def p_then_word(p):
603 """then_word : maybe_then_word"""
604 p[0] = p[1]
605
606def p_done_word(p):
607 """done_word : maybe_done_word"""
608 p[0] = p[1]
609
610def p_do_word(p):
611 """do_word : maybe_do_word"""
612 p[0] = p[1]
613
614def p_until_word(p):
615 """until_word : maybe_until_word"""
616 p[0] = p[1]
617
618def p_assignment_word(p):
619 """assignment_word : maybe_assignment_word"""
620 p[0] = ('assignment_word', p[1][1])
621
622def p_bang_word(p):
623 """bang_word : maybe_bang_word"""
624 p[0] = ('bang_word', p[1][1])
625
626def p_token(p):
627 """token : TOKEN
628 | Fi"""
629 p[0] = p[1]
630
631def p_empty(p):
632 'empty :'
633 p[0] = None
634
635# Error rule for syntax errors
636def p_error(p):
637 msg = []
638 w = msg.append
639 w('%r\n' % p)
640 w('followed by:\n')
641 for i in range(5):
642 n = yacc.token()
643 if not n:
644 break
645 w(' %r\n' % n)
646 raise sherrors.ShellSyntaxError(''.join(msg))
647
648# Build the parser
649try:
650 import pyshtables
651except ImportError:
652 outputdir = os.path.dirname(__file__)
653 if not os.access(outputdir, os.W_OK):
654 outputdir = ''
655 yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
656else:
657 yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
658
659
660def parse(input, eof=False, debug=False):
661 """Parse a whole script at once and return the generated AST and unconsumed
662 data in a tuple.
663
664    NOTE: eof is probably meaningless for now since the parser cannot work
665    in pull mode. It should be set to True.
666 """
667 lexer = pyshlex.PLYLexer()
668 remaining = lexer.add(input, eof)
669 if lexer.is_empty():
670 return [], remaining
671 if debug:
672 debug = 2
673 return yacc.parse(lexer=lexer, debug=debug), remaining
674
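# A minimal standalone usage sketch (the command string is made up; assumes this
# module is importable as pyshyacc and the snippet is run as a separate script):
import pyshyacc

tree, remaining = pyshyacc.parse('ls | wc -l && echo done\n', True)
pyshyacc.print_commands(tree)    # pretty printer defined further below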
675#-------------------------------------------------------------------------------
676# AST rendering helpers
677#-------------------------------------------------------------------------------
678
679def format_commands(v):
680 """Return a tree made of strings and lists. Make command trees easier to
681 display.
682 """
683 if isinstance(v, list):
684 return [format_commands(c) for c in v]
685 if isinstance(v, tuple):
686 if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str):
687 if v[0] == 'async':
688 return ['AsyncList', map(format_commands, v[1])]
689 else:
690 #Avoid decomposing tuples like ('pipeline', Pipeline(...))
691 return format_commands(v[1])
692 return format_commands(list(v))
693 elif isinstance(v, IfCond):
694 name = ['IfCond']
695 name += ['if', map(format_commands, v.cond)]
696 name += ['then', map(format_commands, v.if_cmds)]
697 name += ['else', map(format_commands, v.else_cmds)]
698 return name
699 elif isinstance(v, ForLoop):
700 name = ['ForLoop']
701 name += [repr(v.name)+' in ', map(str, v.items)]
702 name += ['commands', map(format_commands, v.cmds)]
703 return name
704 elif isinstance(v, AndOr):
705 return [v.op, format_commands(v.left), format_commands(v.right)]
706 elif isinstance(v, Pipeline):
707 name = 'Pipeline'
708 if v.reverse_status:
709 name = '!' + name
710 return [name, format_commands(v.commands)]
711 elif isinstance(v, Case):
712 name = ['Case']
713        name += [v.name, format_commands(v.items)]
        return name
714 elif isinstance(v, SimpleCommand):
715 name = ['SimpleCommand']
716 if v.words:
717 name += ['words', map(str, v.words)]
718 if v.assigns:
719 assigns = [tuple(a[1]) for a in v.assigns]
720 name += ['assigns', map(str, assigns)]
721 if v.redirs:
722 name += ['redirs', map(format_commands, v.redirs)]
723 return name
724 elif isinstance(v, RedirectList):
725 name = ['RedirectList']
726 if v.redirs:
727 name += ['redirs', map(format_commands, v.redirs)]
728 name += ['command', format_commands(v.cmd)]
729 return name
730 elif isinstance(v, IORedirect):
731 return ' '.join(map(str, (v.io_number, v.op, v.filename)))
732 elif isinstance(v, HereDocument):
733 return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content))))
734 elif isinstance(v, SubShell):
735 return ['SubShell', map(format_commands, v.cmds)]
736 else:
737 return repr(v)
738
739def print_commands(cmds, output=sys.stdout):
740 """Pretty print a command tree."""
741 def print_tree(cmd, spaces, output):
742 if isinstance(cmd, list):
743 for c in cmd:
744 print_tree(c, spaces + 3, output)
745 else:
746 print >>output, ' '*spaces + str(cmd)
747
748 formatted = format_commands(cmds)
749 print_tree(formatted, 0, output)
750
751
752def stringify_commands(cmds):
753 """Serialize a command tree as a string.
754
755 Returned string is not pretty and is currently used for unit tests only.
756 """
757 def stringify(value):
758 output = []
759 if isinstance(value, list):
760 formatted = []
761 for v in value:
762 formatted.append(stringify(v))
763 formatted = ' '.join(formatted)
764 output.append(''.join(['<', formatted, '>']))
765 else:
766 output.append(value)
767 return ' '.join(output)
768
769 return stringify(format_commands(cmds))
770
771
772def visit_commands(cmds, callable):
773 """Visit the command tree and execute callable on every Pipeline and
774 SimpleCommand instances.
775 """
776 if isinstance(cmds, (tuple, list)):
777 map(lambda c: visit_commands(c,callable), cmds)
778 elif isinstance(cmds, (Pipeline, SimpleCommand)):
779 callable(cmds)
diff --git a/bitbake/lib/bb/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py
new file mode 100644
index 0000000000..49d0533de2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/sherrors.py
@@ -0,0 +1,41 @@
1# sherrors.py - shell errors and signals
2#
3# Copyright 2007 Patrick Mezard
4#
5# This software may be used and distributed according to the terms
6# of the GNU General Public License, incorporated herein by reference.
7
8"""Define shell exceptions and error codes.
9"""
10
11class ShellError(Exception):
12 pass
13
14class ShellSyntaxError(ShellError):
15 pass
16
17class UtilityError(ShellError):
18 """Raised upon utility syntax error (option or operand error)."""
19 pass
20
21class ExpansionError(ShellError):
22 pass
23
24class CommandNotFound(ShellError):
25 """Specified command was not found."""
26 pass
27
28class RedirectionError(ShellError):
29 pass
30
31class VarAssignmentError(ShellError):
32 """Variable assignment error."""
33 pass
34
35class ExitSignal(ShellError):
36 """Exit signal."""
37 pass
38
39class ReturnSignal(ShellError):
40    """Return signal."""
41 pass
diff --git a/bitbake/lib/bb/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py
new file mode 100644
index 0000000000..46eca22802
--- /dev/null
+++ b/bitbake/lib/bb/pysh/subprocess_fix.py
@@ -0,0 +1,77 @@
1# subprocess - Subprocesses with accessible I/O streams
2#
3# For more information about this module, see PEP 324.
4#
5# This module should remain compatible with Python 2.2, see PEP 291.
6#
7# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
8#
9# Licensed to PSF under a Contributor Agreement.
10# See http://www.python.org/2.4/license for licensing details.
11
12def list2cmdline(seq):
13 """
14 Translate a sequence of arguments into a command line
15 string, using the same rules as the MS C runtime:
16
17 1) Arguments are delimited by white space, which is either a
18 space or a tab.
19
20 2) A string surrounded by double quotation marks is
21 interpreted as a single argument, regardless of white space
22 contained within. A quoted string can be embedded in an
23 argument.
24
25 3) A double quotation mark preceded by a backslash is
26 interpreted as a literal double quotation mark.
27
28 4) Backslashes are interpreted literally, unless they
29 immediately precede a double quotation mark.
30
31 5) If backslashes immediately precede a double quotation mark,
32 every pair of backslashes is interpreted as a literal
33 backslash. If the number of backslashes is odd, the last
34 backslash escapes the next double quotation mark as
35 described in rule 3.
36 """
37
38 # See
39 # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
40 result = []
41 needquote = False
42 for arg in seq:
43 bs_buf = []
44
45 # Add a space to separate this argument from the others
46 if result:
47 result.append(' ')
48
49 needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
50 if needquote:
51 result.append('"')
52
53 for c in arg:
54 if c == '\\':
55 # Don't know if we need to double yet.
56 bs_buf.append(c)
57 elif c == '"':
58                # Double backslashes.
59 result.append('\\' * len(bs_buf)*2)
60 bs_buf = []
61 result.append('\\"')
62 else:
63 # Normal char
64 if bs_buf:
65 result.extend(bs_buf)
66 bs_buf = []
67 result.append(c)
68
69        # Add remaining backslashes, if any.
70 if bs_buf:
71 result.extend(bs_buf)
72
73 if needquote:
74 result.extend(bs_buf)
75 result.append('"')
76
77 return ''.join(result)
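# A few concrete cases exercising the quoting rules above; the expected strings
# follow from the code and are a sketch, not an exhaustive test.
assert list2cmdline(['echo', 'hello']) == 'echo hello'
assert list2cmdline(['a b', 'c"d']) == '"a b" c\\"d'   # quote spaces, escape embedded quotes
assert list2cmdline(['', 'x']) == '"" x'               # empty arguments become ""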
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
new file mode 100644
index 0000000000..90c610695f
--- /dev/null
+++ b/bitbake/lib/bb/runqueue.py
@@ -0,0 +1,2172 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'RunQueue' implementation
6
7Handles preparation and execution of a queue of tasks
8"""
9
10# Copyright (C) 2006-2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import copy
26import os
27import sys
28import signal
29import stat
30import fcntl
31import errno
32import logging
33import re
34import bb
35from bb import msg, data, event
36from bb import monitordisk
37import subprocess
38
39try:
40 import cPickle as pickle
41except ImportError:
42 import pickle
43
44bblogger = logging.getLogger("BitBake")
45logger = logging.getLogger("BitBake.RunQueue")
46
47__find_md5__ = re.compile( r'(?i)(?<![a-z0-9])[a-f0-9]{32}(?![a-z0-9])' )
48
49class RunQueueStats:
50 """
51 Holds statistics on the tasks handled by the associated runQueue
52 """
53 def __init__(self, total):
54 self.completed = 0
55 self.skipped = 0
56 self.failed = 0
57 self.active = 0
58 self.total = total
59
60 def copy(self):
61 obj = self.__class__(self.total)
62 obj.__dict__.update(self.__dict__)
63 return obj
64
65 def taskFailed(self):
66 self.active = self.active - 1
67 self.failed = self.failed + 1
68
69 def taskCompleted(self, number = 1):
70 self.active = self.active - number
71 self.completed = self.completed + number
72
73 def taskSkipped(self, number = 1):
74 self.active = self.active + number
75 self.skipped = self.skipped + number
76
77 def taskActive(self):
78 self.active = self.active + 1
79
80# These values indicate the next step due to be run in the
81# runQueue state machine
82runQueuePrepare = 2
83runQueueSceneInit = 3
84runQueueSceneRun = 4
85runQueueRunInit = 5
86runQueueRunning = 6
87runQueueFailed = 7
88runQueueCleanUp = 8
89runQueueComplete = 9
90
91class RunQueueScheduler(object):
92 """
93 Control the order tasks are scheduled in.
94 """
95 name = "basic"
96
97 def __init__(self, runqueue, rqdata):
98 """
99 The default scheduler just returns the first buildable task (the
100 priority map is sorted by task number)
101 """
102 self.rq = runqueue
103 self.rqdata = rqdata
104 self.numTasks = len(self.rqdata.runq_fnid)
105
106 self.prio_map = []
107 self.prio_map.extend(range(self.numTasks))
108
109 self.buildable = []
110 self.stamps = {}
111 for taskid in xrange(self.numTasks):
112 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[taskid]]
113 taskname = self.rqdata.runq_task[taskid]
114 self.stamps[taskid] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
115 if self.rq.runq_buildable[taskid] == 1:
116 self.buildable.append(taskid)
117
118 self.rev_prio_map = None
119
120 def next_buildable_task(self):
121 """
122 Return the id of the first task we find that is buildable
123 """
124 self.buildable = [x for x in self.buildable if not self.rq.runq_running[x] == 1]
125 if not self.buildable:
126 return None
127 if len(self.buildable) == 1:
128 taskid = self.buildable[0]
129 stamp = self.stamps[taskid]
130 if stamp not in self.rq.build_stamps.itervalues():
131 return taskid
132
133 if not self.rev_prio_map:
134 self.rev_prio_map = range(self.numTasks)
135 for taskid in xrange(self.numTasks):
136 self.rev_prio_map[self.prio_map[taskid]] = taskid
137
138 best = None
139 bestprio = None
140 for taskid in self.buildable:
141 prio = self.rev_prio_map[taskid]
142 if bestprio is None or bestprio > prio:
143 stamp = self.stamps[taskid]
144 if stamp in self.rq.build_stamps.itervalues():
145 continue
146 bestprio = prio
147 best = taskid
148
149 return best
150
151 def next(self):
152 """
153 Return the id of the task we should build next
154 """
155 if self.rq.stats.active < self.rq.number_tasks:
156 return self.next_buildable_task()
157
158 def newbuilable(self, task):
159 self.buildable.append(task)
160
161class RunQueueSchedulerSpeed(RunQueueScheduler):
162 """
163 A scheduler optimised for speed. The priority map is sorted by task weight,
164 heavier weighted tasks (tasks needed by the most other tasks) are run first.
165 """
166 name = "speed"
167
168 def __init__(self, runqueue, rqdata):
169 """
170 The priority map is sorted by task weight.
171 """
172 RunQueueScheduler.__init__(self, runqueue, rqdata)
173
174 sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
175 copyweight = copy.deepcopy(self.rqdata.runq_weight)
176 self.prio_map = []
177
178 for weight in sortweight:
179 idx = copyweight.index(weight)
180 self.prio_map.append(idx)
181 copyweight[idx] = -1
182
183 self.prio_map.reverse()
184
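# A minimal standalone sketch of the priority-map construction above, using a
# made-up weight list (not part of the original file). Heavier tasks end up
# earlier in prio_map and are therefore attempted first.
weights = [10, 40, 20, 30]       # hypothetical weight per task id
scratch = list(weights)
prio_map = []
for w in sorted(weights):
    idx = scratch.index(w)
    prio_map.append(idx)
    scratch[idx] = -1            # stop the same entry matching twice
prio_map.reverse()
assert prio_map == [1, 3, 2, 0]  # task 1 (weight 40) is scheduled first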
185class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
186 """
187    A scheduler optimised to complete .bb files as quickly as possible. The
188 priority map is sorted by task weight, but then reordered so once a given
189 .bb file starts to build, it's completed as quickly as possible. This works
190 well where disk space is at a premium and classes like OE's rm_work are in
191 force.
192 """
193 name = "completion"
194
195 def __init__(self, runqueue, rqdata):
196 RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
197
198 #FIXME - whilst this groups all fnids together it does not reorder the
199 #fnid groups optimally.
200
201 basemap = copy.deepcopy(self.prio_map)
202 self.prio_map = []
203 while (len(basemap) > 0):
204 entry = basemap.pop(0)
205 self.prio_map.append(entry)
206 fnid = self.rqdata.runq_fnid[entry]
207 todel = []
208 for entry in basemap:
209 entry_fnid = self.rqdata.runq_fnid[entry]
210 if entry_fnid == fnid:
211 todel.append(basemap.index(entry))
212 self.prio_map.append(entry)
213 todel.reverse()
214 for idx in todel:
215 del basemap[idx]
216
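# A simplified standalone sketch of the regrouping above, with made-up data:
# entries that share a recipe (fnid) are pulled in behind the first one seen,
# so a recipe's tasks run back to back once it starts building.
basemap = [3, 0, 2, 1]           # hypothetical weight-sorted task ids
runq_fnid = [7, 7, 9, 9]         # hypothetical recipe id per task id
prio_map = []
while basemap:
    entry = basemap.pop(0)
    prio_map.append(entry)
    fnid = runq_fnid[entry]
    for e in [e for e in basemap if runq_fnid[e] == fnid]:
        basemap.remove(e)
        prio_map.append(e)
assert prio_map == [3, 2, 0, 1]  # recipe 9's tasks, then recipe 7's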
217class RunQueueData:
218 """
219 BitBake Run Queue implementation
220 """
221 def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
222 self.cooker = cooker
223 self.dataCache = dataCache
224 self.taskData = taskData
225 self.targets = targets
226 self.rq = rq
227 self.warn_multi_bb = False
228
229 self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or ""
230 self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
231
232 self.reset()
233
234 def reset(self):
235 self.runq_fnid = []
236 self.runq_task = []
237 self.runq_depends = []
238 self.runq_revdeps = []
239 self.runq_hash = []
240
241 def runq_depends_names(self, ids):
242 import re
243 ret = []
244 for id in self.runq_depends[ids]:
245 nam = os.path.basename(self.get_user_idstring(id))
246 nam = re.sub("_[^,]*,", ",", nam)
247 ret.extend([nam])
248 return ret
249
250 def get_task_name(self, task):
251 return self.runq_task[task]
252
253 def get_task_file(self, task):
254 return self.taskData.fn_index[self.runq_fnid[task]]
255
256 def get_task_hash(self, task):
257 return self.runq_hash[task]
258
259 def get_user_idstring(self, task, task_name_suffix = ""):
260 fn = self.taskData.fn_index[self.runq_fnid[task]]
261 taskname = self.runq_task[task] + task_name_suffix
262 return "%s, %s" % (fn, taskname)
263
264 def get_task_id(self, fnid, taskname):
265 for listid in xrange(len(self.runq_fnid)):
266 if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
267 return listid
268 return None
269
270 def circular_depchains_handler(self, tasks):
271 """
272 Some tasks aren't buildable, likely due to circular dependency issues.
273 Identify the circular dependencies and print them in a user readable format.
274 """
275 from copy import deepcopy
276
277 valid_chains = []
278 explored_deps = {}
279 msgs = []
280
281 def chain_reorder(chain):
282 """
283 Reorder a dependency chain so the lowest task id is first
284 """
285 lowest = 0
286 new_chain = []
287 for entry in xrange(len(chain)):
288 if chain[entry] < chain[lowest]:
289 lowest = entry
290 new_chain.extend(chain[lowest:])
291 new_chain.extend(chain[:lowest])
292 return new_chain
293
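        # For example, chain_reorder([5, 2, 7]) returns [2, 7, 5]: the chain is
        # rotated (not sorted) so the lowest task id comes first, which makes
        # duplicate loops easy to spot.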
294 def chain_compare_equal(chain1, chain2):
295 """
296 Compare two dependency chains and see if they're the same
297 """
298 if len(chain1) != len(chain2):
299 return False
300 for index in xrange(len(chain1)):
301 if chain1[index] != chain2[index]:
302 return False
303 return True
304
305 def chain_array_contains(chain, chain_array):
306 """
307 Return True if chain_array contains chain
308 """
309 for ch in chain_array:
310 if chain_compare_equal(ch, chain):
311 return True
312 return False
313
314 def find_chains(taskid, prev_chain):
315 prev_chain.append(taskid)
316 total_deps = []
317 total_deps.extend(self.runq_revdeps[taskid])
318 for revdep in self.runq_revdeps[taskid]:
319 if revdep in prev_chain:
320 idx = prev_chain.index(revdep)
321 # To prevent duplicates, reorder the chain to start with the lowest taskid
322 # and search through an array of those we've already printed
323 chain = prev_chain[idx:]
324 new_chain = chain_reorder(chain)
325 if not chain_array_contains(new_chain, valid_chains):
326 valid_chains.append(new_chain)
327 msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
328 for dep in new_chain:
329 msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
330 msgs.append("\n")
331 if len(valid_chains) > 10:
332 msgs.append("Aborted dependency loops search after 10 matches.\n")
333 return msgs
334 continue
335 scan = False
336 if revdep not in explored_deps:
337 scan = True
338 elif revdep in explored_deps[revdep]:
339 scan = True
340 else:
341 for dep in prev_chain:
342 if dep in explored_deps[revdep]:
343 scan = True
344 if scan:
345 find_chains(revdep, copy.deepcopy(prev_chain))
346 for dep in explored_deps[revdep]:
347 if dep not in total_deps:
348 total_deps.append(dep)
349
350 explored_deps[taskid] = total_deps
351
352 for task in tasks:
353 find_chains(task, [])
354
355 return msgs
356
357 def calculate_task_weights(self, endpoints):
358 """
359        Calculate a number representing the "weight" of each task. Heavier weighted tasks
360        are depended upon by more other tasks and hence should be executed sooner for maximum speed.
361
362        This function also sanity checks the task list, finding tasks that are
363        impossible to execute due to circular dependencies.
364 """
365
366 numTasks = len(self.runq_fnid)
367 weight = []
368 deps_left = []
369 task_done = []
370
371 for listid in xrange(numTasks):
372 task_done.append(False)
373 weight.append(1)
374 deps_left.append(len(self.runq_revdeps[listid]))
375
376 for listid in endpoints:
377 weight[listid] = 10
378 task_done[listid] = True
379
380 while True:
381 next_points = []
382 for listid in endpoints:
383 for revdep in self.runq_depends[listid]:
384 weight[revdep] = weight[revdep] + weight[listid]
385 deps_left[revdep] = deps_left[revdep] - 1
386 if deps_left[revdep] == 0:
387 next_points.append(revdep)
388 task_done[revdep] = True
389 endpoints = next_points
390 if len(next_points) == 0:
391 break
392
393 # Circular dependency sanity check
394 problem_tasks = []
395 for task in xrange(numTasks):
396 if task_done[task] is False or deps_left[task] != 0:
397 problem_tasks.append(task)
398 logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
399 logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
400
401 if problem_tasks:
402 message = "Unbuildable tasks were found.\n"
403 message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
404 message = message + "Identifying dependency loops (this may take a short while)...\n"
405 logger.error(message)
406
407 msgs = self.circular_depchains_handler(problem_tasks)
408
409 message = "\n"
410 for msg in msgs:
411 message = message + msg
412 bb.msg.fatal("RunQueue", message)
413
414 return weight
415
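# A minimal standalone sketch of the propagation above for a made-up three-task
# chain in which task 2 depends on task 1 and task 1 depends on task 0:
runq_depends = [set(), {0}, {1}]   # forward dependencies per task id
runq_revdeps = [{1}, {2}, set()]   # reverse dependencies per task id

weight = [1] * 3
deps_left = [len(r) for r in runq_revdeps]
endpoints = [t for t, r in enumerate(runq_revdeps) if not r]   # [2]
for t in endpoints:
    weight[t] = 10

while endpoints:
    next_points = []
    for t in endpoints:
        for dep in runq_depends[t]:
            weight[dep] += weight[t]
            deps_left[dep] -= 1
            if deps_left[dep] == 0:
                next_points.append(dep)
    endpoints = next_points

assert weight == [12, 11, 10]      # the task everything relies on weighs the most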
416 def prepare(self):
417 """
418 Turn a set of taskData into a RunQueue and compute data needed
419 to optimise the execution order.
420 """
421
422 runq_build = []
423 recursivetasks = {}
424 recursiveitasks = {}
425 recursivetasksselfref = set()
426
427 taskData = self.taskData
428
429 if len(taskData.tasks_name) == 0:
430 # Nothing to do
431 return 0
432
433 logger.info("Preparing runqueue")
434
435 # Step A - Work out a list of tasks to run
436 #
437 # Taskdata gives us a list of possible providers for every build and run
438 # target ordered by priority. It also gives information on each of those
439 # providers.
440 #
441 # To create the actual list of tasks to execute we fix the list of
442 # providers and then resolve the dependencies into task IDs. This
443 # process is repeated for each type of dependency (tdepends, deptask,
444        # rdeptask, recrdeptask, idepends).
445
446 def add_build_dependencies(depids, tasknames, depends):
447 for depid in depids:
448 # Won't be in build_targets if ASSUME_PROVIDED
449 if depid not in taskData.build_targets:
450 continue
451 depdata = taskData.build_targets[depid][0]
452 if depdata is None:
453 continue
454 for taskname in tasknames:
455 taskid = taskData.gettask_id_fromfnid(depdata, taskname)
456 if taskid is not None:
457 depends.add(taskid)
458
459 def add_runtime_dependencies(depids, tasknames, depends):
460 for depid in depids:
461 if depid not in taskData.run_targets:
462 continue
463 depdata = taskData.run_targets[depid][0]
464 if depdata is None:
465 continue
466 for taskname in tasknames:
467 taskid = taskData.gettask_id_fromfnid(depdata, taskname)
468 if taskid is not None:
469 depends.add(taskid)
470
471 def add_resolved_dependencies(depids, tasknames, depends):
472 for depid in depids:
473 for taskname in tasknames:
474 taskid = taskData.gettask_id_fromfnid(depid, taskname)
475 if taskid is not None:
476 depends.add(taskid)
477
478 for task in xrange(len(taskData.tasks_name)):
479 depends = set()
480 fnid = taskData.tasks_fnid[task]
481 fn = taskData.fn_index[fnid]
482 task_deps = self.dataCache.task_deps[fn]
483
484 #logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
485
486 if fnid not in taskData.failed_fnids:
487
488 # Resolve task internal dependencies
489 #
490 # e.g. addtask before X after Y
491 depends = set(taskData.tasks_tdepends[task])
492
493 # Resolve 'deptask' dependencies
494 #
495 # e.g. do_sometask[deptask] = "do_someothertask"
496 # (makes sure sometask runs after someothertask of all DEPENDS)
497 if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
498 tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
499 add_build_dependencies(taskData.depids[fnid], tasknames, depends)
500
501 # Resolve 'rdeptask' dependencies
502 #
503 # e.g. do_sometask[rdeptask] = "do_someothertask"
504 # (makes sure sometask runs after someothertask of all RDEPENDS)
505 if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
506 tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
507 add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
508
509 # Resolve inter-task dependencies
510 #
511 # e.g. do_sometask[depends] = "targetname:do_someothertask"
512 # (makes sure sometask runs after targetname's someothertask)
513 idepends = taskData.tasks_idepends[task]
514 for (depid, idependtask) in idepends:
515 if depid in taskData.build_targets and not depid in taskData.failed_deps:
516 # Won't be in build_targets if ASSUME_PROVIDED
517 depdata = taskData.build_targets[depid][0]
518 if depdata is not None:
519 taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
520 if taskid is None:
521 bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
522 depends.add(taskid)
523 irdepends = taskData.tasks_irdepends[task]
524 for (depid, idependtask) in irdepends:
525 if depid in taskData.run_targets:
526 # Won't be in run_targets if ASSUME_PROVIDED
527 depdata = taskData.run_targets[depid][0]
528 if depdata is not None:
529 taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
530 if taskid is None:
531 bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, taskData.fn_index[depdata]))
532 depends.add(taskid)
533
534 # Resolve recursive 'recrdeptask' dependencies (Part A)
535 #
536 # e.g. do_sometask[recrdeptask] = "do_someothertask"
537 # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
538 # We cover the recursive part of the dependencies below
539 if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
540 tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
541 recursivetasks[task] = tasknames
542 add_build_dependencies(taskData.depids[fnid], tasknames, depends)
543 add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
544 if taskData.tasks_name[task] in tasknames:
545 recursivetasksselfref.add(task)
546
547 if 'recideptask' in task_deps and taskData.tasks_name[task] in task_deps['recideptask']:
548 recursiveitasks[task] = []
549 for t in task_deps['recideptask'][taskData.tasks_name[task]].split():
550 newdep = taskData.gettask_id_fromfnid(fnid, t)
551 recursiveitasks[task].append(newdep)
552
553 self.runq_fnid.append(taskData.tasks_fnid[task])
554 self.runq_task.append(taskData.tasks_name[task])
555 self.runq_depends.append(depends)
556 self.runq_revdeps.append(set())
557 self.runq_hash.append("")
558
559 runq_build.append(0)
560
561 # Resolve recursive 'recrdeptask' dependencies (Part B)
562 #
563 # e.g. do_sometask[recrdeptask] = "do_someothertask"
564 # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
565 # We need to do this separately since we need all of self.runq_depends to be complete before this is processed
566 extradeps = {}
567 for task in recursivetasks:
568 extradeps[task] = set(self.runq_depends[task])
569 tasknames = recursivetasks[task]
570 seendeps = set()
571 seenfnid = []
572
573 def generate_recdeps(t):
574 newdeps = set()
575 add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
576 extradeps[task].update(newdeps)
577 seendeps.add(t)
578 newdeps.add(t)
579 for i in newdeps:
580 for n in self.runq_depends[i]:
581 if n not in seendeps:
582 generate_recdeps(n)
583 generate_recdeps(task)
584
585 if task in recursiveitasks:
586 for dep in recursiveitasks[task]:
587 generate_recdeps(dep)
588
589 # Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
590 for task in recursivetasks:
591 extradeps[task].difference_update(recursivetasksselfref)
592
593 for task in xrange(len(taskData.tasks_name)):
594 # Add in extra dependencies
595 if task in extradeps:
596 self.runq_depends[task] = extradeps[task]
597 # Remove all self references
598 if task in self.runq_depends[task]:
599 logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
600 self.runq_depends[task].remove(task)
601
602 # Step B - Mark all active tasks
603 #
604 # Start with the tasks we were asked to run and mark all dependencies
605 # as active too. If the task is to be 'forced', clear its stamp. Once
606 # all active tasks are marked, prune the ones we don't need.
607
608 logger.verbose("Marking Active Tasks")
609
610 def mark_active(listid, depth):
611 """
612 Mark an item as active along with its depends
613 (calls itself recursively)
614 """
615
616 if runq_build[listid] == 1:
617 return
618
619 runq_build[listid] = 1
620
621 depends = self.runq_depends[listid]
622 for depend in depends:
623 mark_active(depend, depth+1)
624
625 self.target_pairs = []
626 for target in self.targets:
627 targetid = taskData.getbuild_id(target[0])
628
629 if targetid not in taskData.build_targets:
630 continue
631
632 if targetid in taskData.failed_deps:
633 continue
634
635 fnid = taskData.build_targets[targetid][0]
636 fn = taskData.fn_index[fnid]
637 self.target_pairs.append((fn, target[1]))
638
639 if fnid in taskData.failed_fnids:
640 continue
641
642 if target[1] not in taskData.tasks_lookup[fnid]:
643 import difflib
644 close_matches = difflib.get_close_matches(target[1], taskData.tasks_lookup[fnid], cutoff=0.7)
645 if close_matches:
646 extra = ". Close matches:\n %s" % "\n ".join(close_matches)
647 else:
648 extra = ""
649 bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (target[1], target[0], extra))
650
651 listid = taskData.tasks_lookup[fnid][target[1]]
652
653 mark_active(listid, 1)
654
655 # Step C - Prune all inactive tasks
656 #
657 # Once all active tasks are marked, prune the ones we don't need.
658
659 maps = []
660 delcount = 0
661 for listid in xrange(len(self.runq_fnid)):
662 if runq_build[listid-delcount] == 1:
663 maps.append(listid-delcount)
664 else:
665 del self.runq_fnid[listid-delcount]
666 del self.runq_task[listid-delcount]
667 del self.runq_depends[listid-delcount]
668 del runq_build[listid-delcount]
669 del self.runq_revdeps[listid-delcount]
670 del self.runq_hash[listid-delcount]
671 delcount = delcount + 1
672 maps.append(-1)
673
674 #
675 # Step D - Sanity checks and computation
676 #
677
678 # Check to make sure we still have tasks to run
679 if len(self.runq_fnid) == 0:
680 if not taskData.abort:
681 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
682 else:
683 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
684
685 logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
686
687 # Remap the dependencies to account for the deleted tasks
688 # Check we didn't delete a task we depend on
689 for listid in xrange(len(self.runq_fnid)):
690 newdeps = []
691 origdeps = self.runq_depends[listid]
692 for origdep in origdeps:
693 if maps[origdep] == -1:
694 bb.msg.fatal("RunQueue", "Invalid mapping - Should never happen!")
695 newdeps.append(maps[origdep])
696 self.runq_depends[listid] = set(newdeps)
697
698 logger.verbose("Assign Weightings")
699
700 # Generate a list of reverse dependencies to ease future calculations
701 for listid in xrange(len(self.runq_fnid)):
702 for dep in self.runq_depends[listid]:
703 self.runq_revdeps[dep].add(listid)
704
705 # Identify tasks at the end of dependency chains
706 # Error on circular dependency loops (length two)
707 endpoints = []
708 for listid in xrange(len(self.runq_fnid)):
709 revdeps = self.runq_revdeps[listid]
710 if len(revdeps) == 0:
711 endpoints.append(listid)
712 for dep in revdeps:
713 if dep in self.runq_depends[listid]:
714 #self.dump_data(taskData)
715 bb.msg.fatal("RunQueue", "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
716
717 logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
718
719 # Calculate task weights
720        # Check for higher length circular dependencies
721 self.runq_weight = self.calculate_task_weights(endpoints)
722
723 # Sanity Check - Check for multiple tasks building the same provider
724 prov_list = {}
725 seen_fn = []
726 for task in xrange(len(self.runq_fnid)):
727 fn = taskData.fn_index[self.runq_fnid[task]]
728 if fn in seen_fn:
729 continue
730 seen_fn.append(fn)
731 for prov in self.dataCache.fn_provides[fn]:
732 if prov not in prov_list:
733 prov_list[prov] = [fn]
734 elif fn not in prov_list[prov]:
735 prov_list[prov].append(fn)
736 for prov in prov_list:
737 if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
738 seen_pn = []
739                # If two versions of the same PN are being built it's fatal; we don't support it.
740 for fn in prov_list[prov]:
741 pn = self.dataCache.pkg_fn[fn]
742 if pn not in seen_pn:
743 seen_pn.append(pn)
744 else:
745 bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
746 msg = "Multiple .bb files are due to be built which each provide %s (%s)." % (prov, " ".join(prov_list[prov]))
747 if self.warn_multi_bb:
748 logger.warn(msg)
749 else:
750 msg += "\n This usually means one provides something the other doesn't and should."
751 logger.error(msg)
752
753 # Create a whitelist usable by the stamp checks
754 stampfnwhitelist = []
755 for entry in self.stampwhitelist.split():
756 entryid = self.taskData.getbuild_id(entry)
757 if entryid not in self.taskData.build_targets:
758 continue
759 fnid = self.taskData.build_targets[entryid][0]
760 fn = self.taskData.fn_index[fnid]
761 stampfnwhitelist.append(fn)
762 self.stampfnwhitelist = stampfnwhitelist
763
764 # Iterate over the task list looking for tasks with a 'setscene' function
765 self.runq_setscene = []
766 if not self.cooker.configuration.nosetscene:
767 for task in range(len(self.runq_fnid)):
768 setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
769 if not setscene:
770 continue
771 self.runq_setscene.append(task)
772
773 def invalidate_task(fn, taskname, error_nostamp):
774 taskdep = self.dataCache.task_deps[fn]
775 fnid = self.taskData.getfn_id(fn)
776 if taskname not in taskData.tasks_lookup[fnid]:
777 logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname)
778 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
779 if error_nostamp:
780 bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
781 else:
782 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
783 else:
784 logger.verbose("Invalidate task %s, %s", taskname, fn)
785 bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)
786
787 # Invalidate task if force mode active
788 if self.cooker.configuration.force:
789 for (fn, target) in self.target_pairs:
790 invalidate_task(fn, target, False)
791
792 # Invalidate task if invalidate mode active
793 if self.cooker.configuration.invalidate_stamp:
794 for (fn, target) in self.target_pairs:
795 for st in self.cooker.configuration.invalidate_stamp.split(','):
796 invalidate_task(fn, "do_%s" % st, True)
797
798 # Iterate over the task list and call into the siggen code
799 dealtwith = set()
800 todeal = set(range(len(self.runq_fnid)))
801 while len(todeal) > 0:
802 for task in todeal.copy():
803 if len(self.runq_depends[task] - dealtwith) == 0:
804 dealtwith.add(task)
805 todeal.remove(task)
806 procdep = []
807 for dep in self.runq_depends[task]:
808 procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
809 self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
810
811 return len(self.runq_fnid)
812
813 def dump_data(self, taskQueue):
814 """
815 Dump some debug information on the internal data structures
816 """
817 logger.debug(3, "run_tasks:")
818 for task in xrange(len(self.rqdata.runq_task)):
819 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
820 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
821 self.rqdata.runq_task[task],
822 self.rqdata.runq_weight[task],
823 self.rqdata.runq_depends[task],
824 self.rqdata.runq_revdeps[task])
825
826 logger.debug(3, "sorted_tasks:")
827 for task1 in xrange(len(self.rqdata.runq_task)):
828 if task1 in self.prio_map:
829 task = self.prio_map[task1]
830 logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
831 taskQueue.fn_index[self.rqdata.runq_fnid[task]],
832 self.rqdata.runq_task[task],
833 self.rqdata.runq_weight[task],
834 self.rqdata.runq_depends[task],
835 self.rqdata.runq_revdeps[task])
836
837class RunQueue:
838 def __init__(self, cooker, cfgData, dataCache, taskData, targets):
839
840 self.cooker = cooker
841 self.cfgData = cfgData
842 self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
843
844 self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile"
845 self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None
846 self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION", True) or None
847 self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None
848
849 self.state = runQueuePrepare
850
851 # For disk space monitor
852 self.dm = monitordisk.diskMonitor(cfgData)
853
854 self.rqexe = None
855 self.worker = None
856 self.workerpipe = None
857 self.fakeworker = None
858 self.fakeworkerpipe = None
859
860 def _start_worker(self, fakeroot = False, rqexec = None):
861 logger.debug(1, "Starting bitbake-worker")
862 magic = "decafbad"
863 if self.cooker.configuration.profile:
864 magic = "decafbadbad"
865 if fakeroot:
866 fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
867 fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
868 env = os.environ.copy()
869 for key, value in (var.split('=') for var in fakerootenv):
870 env[key] = value
871 worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
872 else:
873 worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
874 bb.utils.nonblockingfd(worker.stdout)
875 workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
876
877 workerdata = {
878 "taskdeps" : self.rqdata.dataCache.task_deps,
879 "fakerootenv" : self.rqdata.dataCache.fakerootenv,
880 "fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
881 "fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
882 "sigdata" : bb.parse.siggen.get_taskdata(),
883 "runq_hash" : self.rqdata.runq_hash,
884 "logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
885 "logdefaultverbose" : bb.msg.loggerDefaultVerbose,
886 "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
887 "logdefaultdomain" : bb.msg.loggerDefaultDomains,
888 "prhost" : self.cooker.prhost,
889 "buildname" : self.cfgData.getVar("BUILDNAME", True),
890 "date" : self.cfgData.getVar("DATE", True),
891 "time" : self.cfgData.getVar("TIME", True),
892 }
893
894 worker.stdin.write("<cookerconfig>" + pickle.dumps(self.cooker.configuration) + "</cookerconfig>")
895 worker.stdin.write("<workerdata>" + pickle.dumps(workerdata) + "</workerdata>")
896 worker.stdin.flush()
897
898 return worker, workerpipe
899
900 def _teardown_worker(self, worker, workerpipe):
901 if not worker:
902 return
903 logger.debug(1, "Teardown for bitbake-worker")
904 try:
905 worker.stdin.write("<quit></quit>")
906 worker.stdin.flush()
907 except IOError:
908 pass
909 while worker.returncode is None:
910 workerpipe.read()
911 worker.poll()
912 while workerpipe.read():
913 continue
914 workerpipe.close()
915
916 def start_worker(self):
917 if self.worker:
918 self.teardown_workers()
919 self.teardown = False
920 self.worker, self.workerpipe = self._start_worker()
921
922 def start_fakeworker(self, rqexec):
923 if not self.fakeworker:
924 self.fakeworker, self.fakeworkerpipe = self._start_worker(True, rqexec)
925
926 def teardown_workers(self):
927 self.teardown = True
928 self._teardown_worker(self.worker, self.workerpipe)
929 self.worker = None
930 self.workerpipe = None
931 self._teardown_worker(self.fakeworker, self.fakeworkerpipe)
932 self.fakeworker = None
933 self.fakeworkerpipe = None
934
935 def read_workers(self):
936 self.workerpipe.read()
937 if self.fakeworkerpipe:
938 self.fakeworkerpipe.read()
939
940 def active_fds(self):
941 fds = []
942 if self.workerpipe:
943 fds.append(self.workerpipe.input)
944 if self.fakeworkerpipe:
945 fds.append(self.fakeworkerpipe.input)
946 return fds
947
948 def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
949 def get_timestamp(f):
950 try:
951 if not os.access(f, os.F_OK):
952 return None
953 return os.stat(f)[stat.ST_MTIME]
954 except:
955 return None
956
957 if self.stamppolicy == "perfile":
958 fulldeptree = False
959 else:
960 fulldeptree = True
961 stampwhitelist = []
962 if self.stamppolicy == "whitelist":
963 stampwhitelist = self.rqdata.stampfnwhitelist
964
965 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
966 if taskname is None:
967 taskname = self.rqdata.runq_task[task]
968
969 stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
970
971 # If the stamp is missing, it's not current
972 if not os.access(stampfile, os.F_OK):
973 logger.debug(2, "Stampfile %s not available", stampfile)
974 return False
975 # If it's a 'nostamp' task, it's not current
976 taskdep = self.rqdata.dataCache.task_deps[fn]
977 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
978 logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
979 return False
980
981 if taskname != "do_setscene" and taskname.endswith("_setscene"):
982 return True
983
984 if cache is None:
985 cache = {}
986
987 iscurrent = True
988 t1 = get_timestamp(stampfile)
989 for dep in self.rqdata.runq_depends[task]:
990 if iscurrent:
991 fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
992 taskname2 = self.rqdata.runq_task[dep]
993 stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
994 stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
995 t2 = get_timestamp(stampfile2)
996 t3 = get_timestamp(stampfile3)
997 if t3 and t3 > t2:
998 continue
999 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
1000 if not t2:
1001 logger.debug(2, 'Stampfile %s does not exist', stampfile2)
1002 iscurrent = False
1003 if t1 < t2:
1004 logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
1005 iscurrent = False
1006 if recurse and iscurrent:
1007 if dep in cache:
1008 iscurrent = cache[dep]
1009 if not iscurrent:
1010 logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
1011 else:
1012 iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
1013 cache[dep] = iscurrent
1014 if recurse:
1015 cache[task] = iscurrent
1016 return iscurrent
1017
1018 def _execute_runqueue(self):
1019 """
1020 Run the tasks in a queue prepared by rqdata.prepare()
1021 Upon failure, optionally try to recover the build using any alternate providers
1022 (if the abort on failure configuration option isn't set)
1023 """
1024
1025 retval = True
1026
1027 if self.state is runQueuePrepare:
1028 self.rqexe = RunQueueExecuteDummy(self)
1029 if self.rqdata.prepare() == 0:
1030 self.state = runQueueComplete
1031 else:
1032 self.state = runQueueSceneInit
1033
1034 # we are ready to run, see if any UI client needs the dependency info
1035 if bb.cooker.CookerFeatures.SEND_DEPENDS_TREE in self.cooker.featureset:
1036 depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
1037 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
1038
1039 if self.state is runQueueSceneInit:
1040 dump = self.cooker.configuration.dump_signatures
1041 if dump:
1042 if 'printdiff' in dump:
1043 invalidtasks = self.print_diffscenetasks()
1044 self.dump_signatures(dump)
1045 if 'printdiff' in dump:
1046 self.write_diffscenetasks(invalidtasks)
1047 self.state = runQueueComplete
1048 else:
1049 self.start_worker()
1050 self.rqexe = RunQueueExecuteScenequeue(self)
1051
1052 if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]:
1053 self.dm.check(self)
1054
1055 if self.state is runQueueSceneRun:
1056 retval = self.rqexe.execute()
1057
1058 if self.state is runQueueRunInit:
1059 logger.info("Executing RunQueue Tasks")
1060 self.rqexe = RunQueueExecuteTasks(self)
1061 self.state = runQueueRunning
1062
1063 if self.state is runQueueRunning:
1064 retval = self.rqexe.execute()
1065
1066 if self.state is runQueueCleanUp:
1067 retval = self.rqexe.finish()
1068
1069 if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
1070 self.teardown_workers()
1071 if self.rqexe.stats.failed:
1072 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
1073 else:
1074 # Let's avoid the word "failed" if nothing actually did
1075 logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and all succeeded.", self.rqexe.stats.completed, self.rqexe.stats.skipped)
1076
1077 if self.state is runQueueFailed:
1078 if not self.rqdata.taskData.tryaltconfigs:
1079 raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
1080 for fnid in self.rqexe.failed_fnids:
1081 self.rqdata.taskData.fail_fnid(fnid)
1082 self.rqdata.reset()
1083
1084 if self.state is runQueueComplete:
1085 # All done
1086 return False
1087
1088 # Loop
1089 return retval
1090
1091 def execute_runqueue(self):
1092 # Catch unexpected exceptions and ensure we exit when an error occurs, not loop.
1093 try:
1094 return self._execute_runqueue()
1095 except bb.runqueue.TaskFailure:
1096 raise
1097 except SystemExit:
1098 raise
1099 except:
1100 logger.error("An uncaught exception occurred in runqueue, please see the failure below:")
1101 try:
1102 self.teardown_workers()
1103 except:
1104 pass
1105 self.state = runQueueComplete
1106 raise
1107
1108 def finish_runqueue(self, now = False):
1109 if not self.rqexe:
1110 self.state = runQueueComplete
1111 return
1112
1113 if now:
1114 self.rqexe.finish_now()
1115 else:
1116 self.rqexe.finish()
1117
1118 def dump_signatures(self, options):
1119 done = set()
1120 bb.note("Reparsing files to collect dependency data")
1121 for task in range(len(self.rqdata.runq_fnid)):
1122 if self.rqdata.runq_fnid[task] not in done:
1123 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1124 the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn), self.cooker.data)
1125 done.add(self.rqdata.runq_fnid[task])
1126
1127 bb.parse.siggen.dump_sigs(self.rqdata.dataCache, options)
1128
1129 return
1130
1131 def print_diffscenetasks(self):
1132
1133 valid = []
1134 sq_hash = []
1135 sq_hashfn = []
1136 sq_fn = []
1137 sq_taskname = []
1138 sq_task = []
1139 noexec = []
1140 stamppresent = []
1141 valid_new = set()
1142
1143 for task in xrange(len(self.rqdata.runq_fnid)):
1144 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1145 taskname = self.rqdata.runq_task[task]
1146 taskdep = self.rqdata.dataCache.task_deps[fn]
1147
1148 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1149 noexec.append(task)
1150 continue
1151
1152 sq_fn.append(fn)
1153 sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
1154 sq_hash.append(self.rqdata.runq_hash[task])
1155 sq_taskname.append(taskname)
1156 sq_task.append(task)
1157 call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
1158 locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
1159 valid = bb.utils.better_eval(call, locs)
1160 for v in valid:
1161 valid_new.add(sq_task[v])
1162
1163 # Tasks which are both setscene and noexec never care about dependencies
1164 # We therefore find tasks which are setscene and noexec and mark their
1165 # unique dependencies as valid.
1166 for task in noexec:
1167 if task not in self.rqdata.runq_setscene:
1168 continue
1169 for dep in self.rqdata.runq_depends[task]:
1170 hasnoexecparents = True
1171 for dep2 in self.rqdata.runq_revdeps[dep]:
1172 if dep2 in self.rqdata.runq_setscene and dep2 in noexec:
1173 continue
1174 hasnoexecparents = False
1175 break
1176 if hasnoexecparents:
1177 valid_new.add(dep)
1178
1179 invalidtasks = set()
1180 for task in xrange(len(self.rqdata.runq_fnid)):
1181 if task not in valid_new and task not in noexec:
1182 invalidtasks.add(task)
1183
1184 found = set()
1185 processed = set()
1186 for task in invalidtasks:
1187 toprocess = set([task])
1188 while toprocess:
1189 next = set()
1190 for t in toprocess:
1191 for dep in self.rqdata.runq_depends[t]:
1192 if dep in invalidtasks:
1193 found.add(task)
1194 if dep not in processed:
1195 processed.add(dep)
1196 next.add(dep)
1197 toprocess = next
1198 if task in found:
1199 toprocess = set()
1200
1201 tasklist = []
1202 for task in invalidtasks.difference(found):
1203 tasklist.append(self.rqdata.get_user_idstring(task))
1204
1205 if tasklist:
1206 bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
1207
1208 return invalidtasks.difference(found)
1209
1210 def write_diffscenetasks(self, invalidtasks):
1211
1212 # Define recursion callback
1213 def recursecb(key, hash1, hash2):
1214 hashes = [hash1, hash2]
1215 hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
1216
1217 recout = []
1218 if len(hashfiles) == 2:
1219 out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
1220 recout.extend(list(' ' + l for l in out2))
1221 else:
1222 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
1223
1224 return recout
1225
1226
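# For each invalid task, locate the sigdata file written for its current hash,
# pick the most recent differing sigdata for the same task, and print a recursive
# diff of the two so the user can see why the cached result could not be reused.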
1227 for task in invalidtasks:
1228 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1229 pn = self.rqdata.dataCache.pkg_fn[fn]
1230 taskname = self.rqdata.runq_task[task]
1231 h = self.rqdata.runq_hash[task]
1232 matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
1233 match = None
1234 for m in matches:
1235 if h in m:
1236 match = m
1237 if match is None:
1238 bb.fatal("Can't find a task we're supposed to have written out (hash: %s)" % h)
1239 matches = {k : v for k, v in matches.iteritems() if h not in k}
1240 if matches:
1241 latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
1242 prevh = __find_md5__.search(latestmatch).group(0)
1243 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
1244 bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
1245
1246class RunQueueExecute:
1247
1248 def __init__(self, rq):
1249 self.rq = rq
1250 self.cooker = rq.cooker
1251 self.cfgData = rq.cfgData
1252 self.rqdata = rq.rqdata
1253
1254 self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1)
1255 self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed"
1256
1257 self.runq_buildable = []
1258 self.runq_running = []
1259 self.runq_complete = []
1260
1261 self.build_stamps = {}
1262 self.build_stamps2 = []
1263 self.failed_fnids = []
1264
1265 self.stampcache = {}
1266
1267 rq.workerpipe.setrunqueueexec(self)
1268 if rq.fakeworkerpipe:
1269 rq.fakeworkerpipe.setrunqueueexec(self)
1270
1271 def runqueue_process_waitpid(self, task, status):
1272
1273 # self.build_stamps[task] may not exist when using a shared work directory.
1274 if task in self.build_stamps:
1275 self.build_stamps2.remove(self.build_stamps[task])
1276 del self.build_stamps[task]
1277
1278 if status != 0:
1279 self.task_fail(task, status)
1280 else:
1281 self.task_complete(task)
1282 return True
1283
1284 def finish_now(self):
1285
1286 for worker in [self.rq.worker, self.rq.fakeworker]:
1287 if not worker:
1288 continue
1289 try:
1290 worker.stdin.write("<finishnow></finishnow>")
1291 worker.stdin.flush()
1292 except IOError:
1293 # worker must have died?
1294 pass
1295
1296 if len(self.failed_fnids) != 0:
1297 self.rq.state = runQueueFailed
1298 return
1299
1300 self.rq.state = runQueueComplete
1301 return
1302
1303 def finish(self):
1304 self.rq.state = runQueueCleanUp
1305
1306 if self.stats.active > 0:
1307 bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
1308 self.rq.read_workers()
1309 return self.rq.active_fds()
1310
1311 if len(self.failed_fnids) != 0:
1312 self.rq.state = runQueueFailed
1313 return True
1314
1315 self.rq.state = runQueueComplete
1316 return True
1317
1318 def check_dependencies(self, task, taskdeps, setscene = False):
1319 if not self.rq.depvalidate:
1320 return False
1321
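# depvalidate names a python function supplied by the metadata. It is passed the
# candidate task, a {dep: [pn, taskname, fn]} mapping for its dependencies and the
# set of tasks already marked as not needed; a truthy return value allows the
# setscene task to be skipped as not needed (see the execute() caller below).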
1322 taskdata = {}
1323 taskdeps.add(task)
1324 for dep in taskdeps:
1325 if setscene:
1326 depid = self.rqdata.runq_setscene[dep]
1327 else:
1328 depid = dep
1329 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[depid]]
1330 pn = self.rqdata.dataCache.pkg_fn[fn]
1331 taskname = self.rqdata.runq_task[depid]
1332 taskdata[dep] = [pn, taskname, fn]
1333 call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
1334 locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
1335 valid = bb.utils.better_eval(call, locs)
1336 return valid
1337
1338class RunQueueExecuteDummy(RunQueueExecute):
1339 def __init__(self, rq):
1340 self.rq = rq
1341 self.stats = RunQueueStats(0)
1342
1343 def finish(self):
1344 self.rq.state = runQueueComplete
1345 return
1346
1347class RunQueueExecuteTasks(RunQueueExecute):
1348 def __init__(self, rq):
1349 RunQueueExecute.__init__(self, rq)
1350
1351 self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
1352
1353 self.stampcache = {}
1354
1355 initial_covered = self.rq.scenequeue_covered.copy()
1356
1357 # Mark initial buildable tasks
1358 for task in xrange(self.stats.total):
1359 self.runq_running.append(0)
1360 self.runq_complete.append(0)
1361 if len(self.rqdata.runq_depends[task]) == 0:
1362 self.runq_buildable.append(1)
1363 else:
1364 self.runq_buildable.append(0)
1365 if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
1366 self.rq.scenequeue_covered.add(task)
1367
1368 found = True
1369 while found:
1370 found = False
1371 for task in xrange(self.stats.total):
1372 if task in self.rq.scenequeue_covered:
1373 continue
1374 logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))
1375
1376 if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
1377 found = True
1378 self.rq.scenequeue_covered.add(task)
1379
1380 logger.debug(1, 'Skip list (pre setsceneverify) %s', sorted(self.rq.scenequeue_covered))
1381
1382 # Allow the metadata to elect for setscene tasks to run anyway
1383 covered_remove = set()
1384 if self.rq.setsceneverify:
1385 invalidtasks = []
1386 for task in xrange(len(self.rqdata.runq_task)):
1387 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1388 taskname = self.rqdata.runq_task[task]
1389 taskdep = self.rqdata.dataCache.task_deps[fn]
1390
1391 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1392 continue
1393 if self.rq.check_stamp_task(task, taskname + "_setscene", cache=self.stampcache):
1394 logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
1395 continue
1396 if self.rq.check_stamp_task(task, taskname, recurse = True, cache=self.stampcache):
1397 logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
1398 continue
1399 invalidtasks.append(task)
1400
1401 call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
1402 call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
1403 locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
1404 # Backwards compatibility with older versions without invalidtasks
1405 try:
1406 covered_remove = bb.utils.better_eval(call, locs)
1407 except TypeError:
1408 covered_remove = bb.utils.better_eval(call2, locs)
1409
1410 def removecoveredtask(task):
1411 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1412 taskname = self.rqdata.runq_task[task] + '_setscene'
1413 bb.build.del_stamp(taskname, self.rqdata.dataCache, fn)
1414 self.rq.scenequeue_covered.remove(task)
1415
1416 toremove = covered_remove
1417 for task in toremove:
1418 logger.debug(1, 'Not skipping task %s due to setsceneverify', task)
1419 while toremove:
1420 covered_remove = []
1421 for task in toremove:
1422 removecoveredtask(task)
1423 for deptask in self.rqdata.runq_depends[task]:
1424 if deptask not in self.rq.scenequeue_covered:
1425 continue
1426 if deptask in toremove or deptask in covered_remove or deptask in initial_covered:
1427 continue
1428 logger.debug(1, 'Task %s depends on task %s so not skipping' % (task, deptask))
1429 covered_remove.append(deptask)
1430 toremove = covered_remove
1431
1432 logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
1433
1434 event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
1435
1436 schedulers = self.get_schedulers()
1437 for scheduler in schedulers:
1438 if self.scheduler == scheduler.name:
1439 self.sched = scheduler(self, self.rqdata)
1440 logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
1441 break
1442 else:
1443 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
1444 (self.scheduler, ", ".join(obj.name for obj in schedulers)))
1445
1446 def get_schedulers(self):
1447 schedulers = set(obj for obj in globals().values()
1448 if type(obj) is type and
1449 issubclass(obj, RunQueueScheduler))
1450
1451 user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True)
1452 if user_schedulers:
1453 for sched in user_schedulers.split():
1454 if not "." in sched:
1455 bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
1456 continue
1457
1458 modname, name = sched.rsplit(".", 1)
1459 try:
1460 module = __import__(modname, fromlist=(name,))
1461 except ImportError as exc:
1462 logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
1463 raise SystemExit(1)
1464 else:
1465 schedulers.add(getattr(module, name))
1466 return schedulers
1467
1468 def setbuildable(self, task):
1469 self.runq_buildable[task] = 1
1470 self.sched.newbuilable(task)
1471
1472 def task_completeoutright(self, task):
1473 """
1474 Mark a task as completed
1475 Look at the reverse dependencies and mark any task with
1476 completed dependencies as buildable
1477 """
1478 self.runq_complete[task] = 1
1479 for revdep in self.rqdata.runq_revdeps[task]:
1480 if self.runq_running[revdep] == 1:
1481 continue
1482 if self.runq_buildable[revdep] == 1:
1483 continue
1484 alldeps = 1
1485 for dep in self.rqdata.runq_depends[revdep]:
1486 if self.runq_complete[dep] != 1:
1487 alldeps = 0
1488 if alldeps == 1:
1489 self.setbuildable(revdep)
1490 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
1491 taskname = self.rqdata.runq_task[revdep]
1492 logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
1493
1494 def task_complete(self, task):
1495 self.stats.taskCompleted()
1496 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1497 self.task_completeoutright(task)
1498
1499 def task_fail(self, task, exitcode):
1500 """
1501 Called when a task has failed
1502 Updates the state engine with the failure
1503 """
1504 self.stats.taskFailed()
1505 fnid = self.rqdata.runq_fnid[task]
1506 self.failed_fnids.append(fnid)
1507 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
1508 if self.rqdata.taskData.abort:
1509 self.rq.state = runQueueCleanUp
1510
1511 def task_skip(self, task, reason):
1512 self.runq_running[task] = 1
1513 self.setbuildable(task)
1514 bb.event.fire(runQueueTaskSkipped(task, self.stats, self.rq, reason), self.cfgData)
1515 self.task_completeoutright(task)
1516 self.stats.taskCompleted()
1517 self.stats.taskSkipped()
1518
1519 def execute(self):
1520 """
1521 Run the tasks in a queue prepared by rqdata.prepare()
1522 """
1523
1524 self.rq.read_workers()
1525
1526
1527 if self.stats.total == 0:
1528 # nothing to do
1529 self.rq.state = runQueueCleanUp
1530
1531 task = self.sched.next()
1532 if task is not None:
1533 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
1534 taskname = self.rqdata.runq_task[task]
1535
1536 if task in self.rq.scenequeue_covered:
1537 logger.debug(2, "Setscene covered task %s (%s)", task,
1538 self.rqdata.get_user_idstring(task))
1539 self.task_skip(task, "covered")
1540 return True
1541
1542 if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
1543 logger.debug(2, "Stamp current task %s (%s)", task,
1544 self.rqdata.get_user_idstring(task))
1545 self.task_skip(task, "existing")
1546 return True
1547
1548 taskdep = self.rqdata.dataCache.task_deps[fn]
1549 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1550 startevent = runQueueTaskStarted(task, self.stats, self.rq,
1551 noexec=True)
1552 bb.event.fire(startevent, self.cfgData)
1553 self.runq_running[task] = 1
1554 self.stats.taskActive()
1555 if not self.cooker.configuration.dry_run:
1556 bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
1557 self.task_complete(task)
1558 return True
1559 else:
1560 startevent = runQueueTaskStarted(task, self.stats, self.rq)
1561 bb.event.fire(startevent, self.cfgData)
1562
1563 taskdepdata = self.build_taskdepdata(task)
1564
1565 taskdep = self.rqdata.dataCache.task_deps[fn]
1566 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
1567 if not self.rq.fakeworker:
1568 self.rq.start_fakeworker(self)
1569 self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
1570 self.rq.fakeworker.stdin.flush()
1571 else:
1572 self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
1573 self.rq.worker.stdin.flush()
1574
1575 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
1576 self.build_stamps2.append(self.build_stamps[task])
1577 self.runq_running[task] = 1
1578 self.stats.taskActive()
1579 if self.stats.active < self.number_tasks:
1580 return True
1581
1582 if self.stats.active > 0:
1583 self.rq.read_workers()
1584 return self.rq.active_fds()
1585
1586 if len(self.failed_fnids) != 0:
1587 self.rq.state = runQueueFailed
1588 return True
1589
1590 # Sanity Checks
1591 for task in xrange(self.stats.total):
1592 if self.runq_buildable[task] == 0:
1593 logger.error("Task %s never buildable!", task)
1594 if self.runq_running[task] == 0:
1595 logger.error("Task %s never ran!", task)
1596 if self.runq_complete[task] == 0:
1597 logger.error("Task %s never completed!", task)
1598 self.rq.state = runQueueComplete
1599
1600 return True
1601
1602 def build_taskdepdata(self, task):
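# Build a mapping of task -> [pn, taskname, fn, deps] covering this task and its
# transitive dependencies; execute() pickles it into the <runtask> message so the
# worker has the dependency context available when running the task.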
1603 taskdepdata = {}
1604 next = self.rqdata.runq_depends[task]
1605 next.add(task)
1606 while next:
1607 additional = []
1608 for revdep in next:
1609 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
1610 pn = self.rqdata.dataCache.pkg_fn[fn]
1611 taskname = self.rqdata.runq_task[revdep]
1612 deps = self.rqdata.runq_depends[revdep]
1613 taskdepdata[revdep] = [pn, taskname, fn, deps]
1614 for revdep2 in deps:
1615 if revdep2 not in taskdepdata:
1616 additional.append(revdep2)
1617 next = additional
1618
1619 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
1620 return taskdepdata
1621
1622class RunQueueExecuteScenequeue(RunQueueExecute):
1623 def __init__(self, rq):
1624 RunQueueExecute.__init__(self, rq)
1625
1626 self.scenequeue_covered = set()
1627 self.scenequeue_notcovered = set()
1628 self.scenequeue_notneeded = set()
1629
1630 # If we don't have any setscene functions, skip this step
1631 if len(self.rqdata.runq_setscene) == 0:
1632 rq.scenequeue_covered = set()
1633 rq.state = runQueueRunInit
1634 return
1635
1636 self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
1637
1638 sq_revdeps = []
1639 sq_revdeps_new = []
1640 sq_revdeps_squash = []
1641 self.sq_harddeps = {}
1642
1643 # We need to construct a dependency graph for the setscene functions. Intermediate
1644 # dependencies between the setscene tasks only complicate the code. This code
1645 # therefore aims to collapse the huge runqueue dependency tree into a smaller one
1646 # only containing the setscene functions.
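# (As a rough illustration: intermediate non-setscene tasks drop out of this graph;
# a setscene task ends up linked directly to whichever setscene tasks were reachable
# through the removed intermediates.)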
1647
1648 for task in xrange(self.stats.total):
1649 self.runq_running.append(0)
1650 self.runq_complete.append(0)
1651 self.runq_buildable.append(0)
1652
1653 # First process the chains up to the first setscene task.
1654 endpoints = {}
1655 for task in xrange(len(self.rqdata.runq_fnid)):
1656 sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
1657 sq_revdeps_new.append(set())
1658 if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
1659 endpoints[task] = set()
1660
1661 # Secondly process the chains between setscene tasks.
1662 for task in self.rqdata.runq_setscene:
1663 for dep in self.rqdata.runq_depends[task]:
1664 if dep not in endpoints:
1665 endpoints[dep] = set()
1666 endpoints[dep].add(task)
1667
1668 def process_endpoints(endpoints):
1669 newendpoints = {}
1670 for point, task in endpoints.items():
1671 tasks = set()
1672 if task:
1673 tasks |= task
1674 if sq_revdeps_new[point]:
1675 tasks |= sq_revdeps_new[point]
1676 sq_revdeps_new[point] = set()
1677 if point in self.rqdata.runq_setscene:
1678 sq_revdeps_new[point] = tasks
1679 for dep in self.rqdata.runq_depends[point]:
1680 if point in sq_revdeps[dep]:
1681 sq_revdeps[dep].remove(point)
1682 if tasks:
1683 sq_revdeps_new[dep] |= tasks
1684 if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
1685 newendpoints[dep] = task
1686 if len(newendpoints) != 0:
1687 process_endpoints(newendpoints)
1688
1689 process_endpoints(endpoints)
1690
1691 # Build a list of setscene tasks which are "unskippable"
1692 # These are direct endpoints referenced by the build
1693 endpoints2 = {}
1694 sq_revdeps2 = []
1695 sq_revdeps_new2 = []
1696 def process_endpoints2(endpoints):
1697 newendpoints = {}
1698 for point, task in endpoints.items():
1699 tasks = set([point])
1700 if task:
1701 tasks |= task
1702 if sq_revdeps_new2[point]:
1703 tasks |= sq_revdeps_new2[point]
1704 sq_revdeps_new2[point] = set()
1705 if point in self.rqdata.runq_setscene:
1706 sq_revdeps_new2[point] = tasks
1707 for dep in self.rqdata.runq_depends[point]:
1708 if point in sq_revdeps2[dep]:
1709 sq_revdeps2[dep].remove(point)
1710 if tasks:
1711 sq_revdeps_new2[dep] |= tasks
1712 if (len(sq_revdeps2[dep]) == 0 or len(sq_revdeps_new2[dep]) != 0) and dep not in self.rqdata.runq_setscene:
1713 newendpoints[dep] = tasks
1714 if len(newendpoints) != 0:
1715 process_endpoints2(newendpoints)
1716 for task in xrange(len(self.rqdata.runq_fnid)):
1717 sq_revdeps2.append(copy.copy(self.rqdata.runq_revdeps[task]))
1718 sq_revdeps_new2.append(set())
1719 if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
1720 endpoints2[task] = set()
1721 process_endpoints2(endpoints2)
1722 self.unskippable = []
1723 for task in self.rqdata.runq_setscene:
1724 if sq_revdeps_new2[task]:
1725 self.unskippable.append(self.rqdata.runq_setscene.index(task))
1726
1727 for task in xrange(len(self.rqdata.runq_fnid)):
1728 if task in self.rqdata.runq_setscene:
1729 deps = set()
1730 for dep in sq_revdeps_new[task]:
1731 deps.add(self.rqdata.runq_setscene.index(dep))
1732 sq_revdeps_squash.append(deps)
1733 elif len(sq_revdeps_new[task]) != 0:
1734 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
1735
1736 # Resolve setscene inter-task dependencies
1737 # e.g. do_sometask_setscene[depends] = "targetname:do_someothertask_setscene"
1738 # Note that anything explicitly depended upon will have its reverse dependencies removed to avoid circular dependencies
1739 for task in self.rqdata.runq_setscene:
1740 realid = self.rqdata.taskData.gettask_id(self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]], self.rqdata.runq_task[task] + "_setscene", False)
1741 idepends = self.rqdata.taskData.tasks_idepends[realid]
1742 for (depid, idependtask) in idepends:
1743 if depid not in self.rqdata.taskData.build_targets:
1744 continue
1745
1746 depdata = self.rqdata.taskData.build_targets[depid][0]
1747 if depdata is None:
1748 continue
1749 dep = self.rqdata.taskData.fn_index[depdata]
1750 taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
1751 if taskid is None:
1752 bb.msg.fatal("RunQueue", "Task %s_setscene depends upon non-existent task %s:%s" % (self.rqdata.get_user_idstring(task), dep, idependtask))
1753
1754 if not self.rqdata.runq_setscene.index(taskid) in self.sq_harddeps:
1755 self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)] = set()
1756 self.sq_harddeps[self.rqdata.runq_setscene.index(taskid)].add(self.rqdata.runq_setscene.index(task))
1757
1758 sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
1759 # Have to zero this to avoid circular dependencies
1760 sq_revdeps_squash[self.rqdata.runq_setscene.index(taskid)] = set()
1761
1762 for task in self.sq_harddeps:
1763 for dep in self.sq_harddeps[task]:
1764 sq_revdeps_squash[dep].add(task)
1765
1766 #for task in xrange(len(sq_revdeps_squash)):
1767 # realtask = self.rqdata.runq_setscene[task]
1768 # bb.warn("Task %s: %s_setscene is %s " % (task, self.rqdata.get_user_idstring(realtask) , sq_revdeps_squash[task]))
1769
1770 self.sq_deps = []
1771 self.sq_revdeps = sq_revdeps_squash
1772 self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
1773
1774 for task in xrange(len(self.sq_revdeps)):
1775 self.sq_deps.append(set())
1776 for task in xrange(len(self.sq_revdeps)):
1777 for dep in self.sq_revdeps[task]:
1778 self.sq_deps[dep].add(task)
1779
1780 for task in xrange(len(self.sq_revdeps)):
1781 if len(self.sq_revdeps[task]) == 0:
1782 self.runq_buildable[task] = 1
1783
1784 self.outrightfail = []
1785 if self.rq.hashvalidate:
1786 sq_hash = []
1787 sq_hashfn = []
1788 sq_fn = []
1789 sq_taskname = []
1790 sq_task = []
1791 noexec = []
1792 stamppresent = []
1793 for task in xrange(len(self.sq_revdeps)):
1794 realtask = self.rqdata.runq_setscene[task]
1795 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1796 taskname = self.rqdata.runq_task[realtask]
1797 taskdep = self.rqdata.dataCache.task_deps[fn]
1798
1799 if 'noexec' in taskdep and taskname in taskdep['noexec']:
1800 noexec.append(task)
1801 self.task_skip(task)
1802 bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
1803 continue
1804
1805 if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
1806 logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
1807 stamppresent.append(task)
1808 self.task_skip(task)
1809 continue
1810
1811 if self.rq.check_stamp_task(realtask, taskname, recurse = True, cache=self.stampcache):
1812 logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
1813 stamppresent.append(task)
1814 self.task_skip(task)
1815 continue
1816
1817 sq_fn.append(fn)
1818 sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
1819 sq_hash.append(self.rqdata.runq_hash[realtask])
1820 sq_taskname.append(taskname)
1821 sq_task.append(task)
1822 call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
1823 locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
1824 valid = bb.utils.better_eval(call, locs)
1825
1826 valid_new = stamppresent
1827 for v in valid:
1828 valid_new.append(sq_task[v])
1829
1830 for task in xrange(len(self.sq_revdeps)):
1831 if task not in valid_new and task not in noexec:
1832 realtask = self.rqdata.runq_setscene[task]
1833 logger.debug(2, 'No package found, so skipping setscene task %s',
1834 self.rqdata.get_user_idstring(realtask))
1835 self.outrightfail.append(task)
1836
1837 logger.info('Executing SetScene Tasks')
1838
1839 self.rq.state = runQueueSceneRun
1840
1841 def scenequeue_updatecounters(self, task, fail = False):
1842 for dep in self.sq_deps[task]:
1843 if fail and task in self.sq_harddeps and dep in self.sq_harddeps[task]:
1844 realtask = self.rqdata.runq_setscene[task]
1845 realdep = self.rqdata.runq_setscene[dep]
1846 logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
1847 self.scenequeue_updatecounters(dep, fail)
1848 continue
1849 if task not in self.sq_revdeps2[dep]:
1850 # May already have been removed by the fail case above
1851 continue
1852 self.sq_revdeps2[dep].remove(task)
1853 if len(self.sq_revdeps2[dep]) == 0:
1854 self.runq_buildable[dep] = 1
1855
1856 def task_completeoutright(self, task):
1857 """
1858 Mark a task as completed
1859 Look at the reverse dependencies and mark any task with
1860 completed dependencies as buildable
1861 """
1862
1863 index = self.rqdata.runq_setscene[task]
1864 logger.debug(1, 'Found task %s which could be accelerated',
1865 self.rqdata.get_user_idstring(index))
1866
1867 self.scenequeue_covered.add(task)
1868 self.scenequeue_updatecounters(task)
1869
1870 def task_complete(self, task):
1871 self.stats.taskCompleted()
1872 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1873 self.task_completeoutright(task)
1874
1875 def task_fail(self, task, result):
1876 self.stats.taskFailed()
1877 bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
1878 self.scenequeue_notcovered.add(task)
1879 self.scenequeue_updatecounters(task, True)
1880
1881 def task_failoutright(self, task):
1882 self.runq_running[task] = 1
1883 self.runq_buildable[task] = 1
1884 self.stats.taskCompleted()
1885 self.stats.taskSkipped()
1886 index = self.rqdata.runq_setscene[task]
1887 self.scenequeue_notcovered.add(task)
1888 self.scenequeue_updatecounters(task, True)
1889
1890 def task_skip(self, task):
1891 self.runq_running[task] = 1
1892 self.runq_buildable[task] = 1
1893 self.task_completeoutright(task)
1894 self.stats.taskCompleted()
1895 self.stats.taskSkipped()
1896
1897 def execute(self):
1898 """
1899 Run the setscene tasks in a queue prepared by rqdata.prepare()
1900 """
1901
1902 self.rq.read_workers()
1903
1904 task = None
1905 if self.stats.active < self.number_tasks:
1906 # Find the next setscene to run
1907 for nexttask in xrange(self.stats.total):
1908 if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
1909 if nexttask in self.unskippable:
1910 logger.debug(2, "Setscene task %s is unskippable" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
1911 if nexttask not in self.unskippable and len(self.sq_revdeps[nexttask]) > 0 and self.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sq_revdeps[nexttask], True):
1912 realtask = self.rqdata.runq_setscene[nexttask]
1913 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1914 foundtarget = False
1915 for target in self.rqdata.target_pairs:
1916 if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
1917 foundtarget = True
1918 break
1919 if not foundtarget:
1920 logger.debug(2, "Skipping setscene for task %s" % self.rqdata.get_user_idstring(self.rqdata.runq_setscene[nexttask]))
1921 self.task_skip(nexttask)
1922 self.scenequeue_notneeded.add(nexttask)
1923 return True
1924 if nexttask in self.outrightfail:
1925 self.task_failoutright(nexttask)
1926 return True
1927 task = nexttask
1928 break
1929 if task is not None:
1930 realtask = self.rqdata.runq_setscene[task]
1931 fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
1932
1933 taskname = self.rqdata.runq_task[realtask] + "_setscene"
1934 if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
1935 logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
1936 task, self.rqdata.get_user_idstring(realtask))
1937 self.task_failoutright(task)
1938 return True
1939
1940 if self.cooker.configuration.force:
1941 for target in self.rqdata.target_pairs:
1942 if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
1943 self.task_failoutright(task)
1944 return True
1945
1946 if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
1947 logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
1948 task, self.rqdata.get_user_idstring(realtask))
1949 self.task_skip(task)
1950 return True
1951
1952 startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
1953 bb.event.fire(startevent, self.cfgData)
1954
1955 taskdep = self.rqdata.dataCache.task_deps[fn]
1956 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
1957 if not self.rq.fakeworker:
1958 self.rq.start_fakeworker(self)
1959 self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
1960 self.rq.fakeworker.stdin.flush()
1961 else:
1962 self.rq.worker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
1963 self.rq.worker.stdin.flush()
1964
1965 self.runq_running[task] = 1
1966 self.stats.taskActive()
1967 if self.stats.active < self.number_tasks:
1968 return True
1969
1970 if self.stats.active > 0:
1971 self.rq.read_workers()
1972 return self.rq.active_fds()
1973
1974 #for task in xrange(self.stats.total):
1975 # if self.runq_running[task] != 1:
1976 # buildable = self.runq_buildable[task]
1977 # revdeps = self.sq_revdeps[task]
1978 # bb.warn("Found we didn't run %s %s %s %s" % (task, buildable, str(revdeps), self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
1979
1980 # Convert scenequeue_covered task numbers into full taskgraph ids
1981 oldcovered = self.scenequeue_covered
1982 self.rq.scenequeue_covered = set()
1983 for task in oldcovered:
1984 self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
1985 self.rq.scenequeue_notcovered = set()
1986 for task in self.scenequeue_notcovered:
1987 self.rq.scenequeue_notcovered.add(self.rqdata.runq_setscene[task])
1988
1989 logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))
1990
1991 self.rq.state = runQueueRunInit
1992
1993 completeevent = sceneQueueComplete(self.stats, self.rq)
1994 bb.event.fire(completeevent, self.cfgData)
1995
1996 return True
1997
1998 def runqueue_process_waitpid(self, task, status):
1999 task = self.rq.rqdata.runq_setscene.index(task)
2000
2001 RunQueueExecute.runqueue_process_waitpid(self, task, status)
2002
2003class TaskFailure(Exception):
2004 """
2005 Exception raised when a task in a runqueue fails
2006 """
2007 def __init__(self, x):
2008 self.args = x
2009
2010
2011class runQueueExitWait(bb.event.Event):
2012 """
2013 Event when waiting for task processes to exit
2014 """
2015
2016 def __init__(self, remain):
2017 self.remain = remain
2018 self.message = "Waiting for %s active tasks to finish" % remain
2019 bb.event.Event.__init__(self)
2020
2021class runQueueEvent(bb.event.Event):
2022 """
2023 Base runQueue event class
2024 """
2025 def __init__(self, task, stats, rq):
2026 self.taskid = task
2027 self.taskstring = rq.rqdata.get_user_idstring(task)
2028 self.taskname = rq.rqdata.get_task_name(task)
2029 self.taskfile = rq.rqdata.get_task_file(task)
2030 self.taskhash = rq.rqdata.get_task_hash(task)
2031 self.stats = stats.copy()
2032 bb.event.Event.__init__(self)
2033
2034class sceneQueueEvent(runQueueEvent):
2035 """
2036 Base sceneQueue event class
2037 """
2038 def __init__(self, task, stats, rq, noexec=False):
2039 runQueueEvent.__init__(self, task, stats, rq)
2040 realtask = rq.rqdata.runq_setscene[task]
2041 self.taskstring = rq.rqdata.get_user_idstring(realtask, "_setscene")
2042 self.taskname = rq.rqdata.get_task_name(realtask) + "_setscene"
2043 self.taskfile = rq.rqdata.get_task_file(realtask)
2044 self.taskhash = rq.rqdata.get_task_hash(realtask)
2045
2046class runQueueTaskStarted(runQueueEvent):
2047 """
2048 Event notifying a task was started
2049 """
2050 def __init__(self, task, stats, rq, noexec=False):
2051 runQueueEvent.__init__(self, task, stats, rq)
2052 self.noexec = noexec
2053
2054class sceneQueueTaskStarted(sceneQueueEvent):
2055 """
2056 Event notifying a setscene task was started
2057 """
2058 def __init__(self, task, stats, rq, noexec=False):
2059 sceneQueueEvent.__init__(self, task, stats, rq)
2060 self.noexec = noexec
2061
2062class runQueueTaskFailed(runQueueEvent):
2063 """
2064 Event notifying a task failed
2065 """
2066 def __init__(self, task, stats, exitcode, rq):
2067 runQueueEvent.__init__(self, task, stats, rq)
2068 self.exitcode = exitcode
2069
2070class sceneQueueTaskFailed(sceneQueueEvent):
2071 """
2072 Event notifying a setscene task failed
2073 """
2074 def __init__(self, task, stats, exitcode, rq):
2075 sceneQueueEvent.__init__(self, task, stats, rq)
2076 self.exitcode = exitcode
2077
2078class sceneQueueComplete(sceneQueueEvent):
2079 """
2080 Event when all the sceneQueue tasks are complete
2081 """
2082 def __init__(self, stats, rq):
2083 self.stats = stats.copy()
2084 bb.event.Event.__init__(self)
2085
2086class runQueueTaskCompleted(runQueueEvent):
2087 """
2088 Event notifying a task completed
2089 """
2090
2091class sceneQueueTaskCompleted(sceneQueueEvent):
2092 """
2093 Event notifying a setscene task completed
2094 """
2095
2096class runQueueTaskSkipped(runQueueEvent):
2097 """
2098 Event notifying a task was skipped
2099 """
2100 def __init__(self, task, stats, rq, reason):
2101 runQueueEvent.__init__(self, task, stats, rq)
2102 self.reason = reason
2103
2104class runQueuePipe():
2105 """
2106 Abstraction for a pipe between a worker thread and the server
2107 """
2108 def __init__(self, pipein, pipeout, d, rq, rqexec):
2109 self.input = pipein
2110 if pipeout:
2111 pipeout.close()
2112 bb.utils.nonblockingfd(self.input)
2113 self.queue = ""
2114 self.d = d
2115 self.rq = rq
2116 self.rqexec = rqexec
2117
2118 def setrunqueueexec(self, rqexec):
2119 self.rqexec = rqexec
2120
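# Drain any data currently available on the worker pipe. Messages arrive as
# pickled payloads framed by <event>...</event> (re-fired as bitbake events) and
# <exitcode>...</exitcode> (task/status pairs passed to runqueue_process_waitpid).
# Returns True if any new data was read.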
2121 def read(self):
2122 for w in [self.rq.worker, self.rq.fakeworker]:
2123 if not w:
2124 continue
2125 w.poll()
2126 if w.returncode is not None and not self.rq.teardown:
2127 name = None
2128 if self.rq.worker and w.pid == self.rq.worker.pid:
2129 name = "Worker"
2130 elif self.rq.fakeworker and w.pid == self.rq.fakeworker.pid:
2131 name = "Fakeroot"
2132 bb.error("%s process (%s) exited unexpectedly (%s), shutting down..." % (name, w.pid, str(w.returncode)))
2133 self.rq.finish_runqueue(True)
2134
2135 start = len(self.queue)
2136 try:
2137 self.queue = self.queue + self.input.read(102400)
2138 except (OSError, IOError) as e:
2139 if e.errno != errno.EAGAIN:
2140 raise
2141 end = len(self.queue)
2142 found = True
2143 while found and len(self.queue):
2144 found = False
2145 index = self.queue.find("</event>")
2146 while index != -1 and self.queue.startswith("<event>"):
2147 try:
2148 event = pickle.loads(self.queue[7:index])
2149 except ValueError as e:
2150 bb.msg.fatal("RunQueue", "failed to load pickle '%s': '%s'" % (e, self.queue[7:index]))
2151 bb.event.fire_from_worker(event, self.d)
2152 found = True
2153 self.queue = self.queue[index+8:]
2154 index = self.queue.find("</event>")
2155 index = self.queue.find("</exitcode>")
2156 while index != -1 and self.queue.startswith("<exitcode>"):
2157 try:
2158 task, status = pickle.loads(self.queue[10:index])
2159 except ValueError as e:
2160 bb.msg.fatal("RunQueue", "failed to load pickle '%s': '%s'" % (e, self.queue[10:index]))
2161 self.rqexec.runqueue_process_waitpid(task, status)
2162 found = True
2163 self.queue = self.queue[index+11:]
2164 index = self.queue.find("</exitcode>")
2165 return (end > start)
2166
2167 def close(self):
2168 while self.read():
2169 continue
2170 if len(self.queue) > 0:
2171 print("Warning, worker left partial message: %s" % self.queue)
2172 self.input.close()
diff --git a/bitbake/lib/bb/server/__init__.py b/bitbake/lib/bb/server/__init__.py
new file mode 100644
index 0000000000..da5e480740
--- /dev/null
+++ b/bitbake/lib/bb/server/__init__.py
@@ -0,0 +1,96 @@
1#
2# BitBake Base Server Code
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2008 Richard Purdie
6# Copyright (C) 2013 Alexandru Damian
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21""" Base code for Bitbake server process
22
23Having a common base for all BitBake server classes ensures a consistent
24approach to the interface and minimizes the risks associated with code duplication.
25
26"""
27
28""" BaseImplServer() the base class for all XXServer() implementations.
29
30 These classes contain the actual code that runs the server side, i.e.
31 listening for commands and executing them. Although these implementations
32 hold all the data of the original bitbake command, i.e. the cooker instance,
33 they may well run on a different process or even machine.
34
35"""
36
37class BaseImplServer():
38 def __init__(self):
39 self._idlefuns = {}
40
41 def addcooker(self, cooker):
42 self.cooker = cooker
43
44 def register_idle_function(self, function, data):
45 """Register a function to be called while the server is idle"""
46 assert hasattr(function, '__call__')
47 self._idlefuns[function] = data
48
49
50
51""" BitBakeBaseServerConnection class is the common ancestor to all
52 BitBakeServerConnection classes.
53
54 These classes control the remote server. The only command currently
55 implemented is the terminate() command.
56
57"""
58
59class BitBakeBaseServerConnection():
60 def __init__(self, serverImpl):
61 pass
62
63 def terminate(self):
64 pass
65
66
67""" BitBakeBaseServer class is the common ancestor to all Bitbake servers
68
69 Derive this class in order to implement a BitBakeServer which is the
70 controlling stub for the actual server implementation
71
72"""
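# A concrete server is expected to set self.serverImpl in initServer() and to
# create self.connection in establishConnection(); the methods below only provide
# the plumbing shared by all implementations.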
73class BitBakeBaseServer(object):
74 def initServer(self):
75 self.serverImpl = None # we ensure a runtime crash if not overloaded
76 self.connection = None
77 return
78
79 def addcooker(self, cooker):
80 self.cooker = cooker
81 self.serverImpl.addcooker(cooker)
82
83 def getServerIdleCB(self):
84 return self.serverImpl.register_idle_function
85
86 def saveConnectionDetails(self):
87 return
88
89 def detach(self):
90 return
91
92 def establishConnection(self, featureset):
93 raise NotImplementedError("Must redefine %s.establishConnection()" % self.__class__.__name__)
94
95 def endSession(self):
96 self.connection.terminate()
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
new file mode 100644
index 0000000000..302ee5fc8a
--- /dev/null
+++ b/bitbake/lib/bb/server/process.py
@@ -0,0 +1,252 @@
1#
2# BitBake Process based server.
3#
4# Copyright (C) 2010 Bob Foerster <robert@erafx.com>
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19"""
20 This module implements a multiprocessing.Process based server for bitbake.
21"""
22
23import bb
24import bb.event
25import itertools
26import logging
27import multiprocessing
28import os
29import signal
30import sys
31import time
32import select
33from Queue import Empty
34from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
35
36from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
37
38logger = logging.getLogger('BitBake')
39
40class ServerCommunicator():
41 def __init__(self, connection, event_handle, server):
42 self.connection = connection
43 self.event_handle = event_handle
44 self.server = server
45
46 def runCommand(self, command):
47 # @todo try/except
48 self.connection.send(command)
49
50 if not self.server.is_alive():
51 raise SystemExit
52
53 while True:
54 # don't let the user ctrl-c while we're waiting for a response
55 try:
56 if self.connection.poll(20):
57 return self.connection.recv()
58 else:
59 bb.fatal("Timeout while attempting to communicate with bitbake server")
60 except KeyboardInterrupt:
61 pass
62
63 def getEventHandle(self):
64 return self.event_handle.value
65
66class EventAdapter():
67 """
68 Adapter to wrap our event queue since the caller (bb.event) expects to
69 call a send() method, but our actual queue only has put()
70 """
71 def __init__(self, queue):
72 self.queue = queue
73
74 def send(self, event):
75 try:
76 self.queue.put(event)
77 except Exception as err:
78 print("EventAdapter puked: %s" % str(err))
79
80
81class ProcessServer(Process, BaseImplServer):
82 profile_filename = "profile.log"
83 profile_processed_filename = "profile.log.processed"
84
85 def __init__(self, command_channel, event_queue, featurelist):
86 BaseImplServer.__init__(self)
87 Process.__init__(self)
88 self.command_channel = command_channel
89 self.event_queue = event_queue
90 self.event = EventAdapter(event_queue)
91 self.featurelist = featurelist
92 self.quit = False
93
94 self.quitin, self.quitout = Pipe()
95 self.event_handle = multiprocessing.Value("i")
96
97 def run(self):
98 for event in bb.event.ui_queue:
99 self.event_queue.put(event)
100 self.event_handle.value = bb.event.register_UIHhandler(self)
101
102 bb.cooker.server_main(self.cooker, self.main)
103
104 def main(self):
105 # Ignore SIGINT within the server, as all SIGINT handling is done by
106 # the UI and communicated to us
107 self.quitin.close()
108 signal.signal(signal.SIGINT, signal.SIG_IGN)
109 while not self.quit:
110 try:
111 if self.command_channel.poll():
112 command = self.command_channel.recv()
113 self.runCommand(command)
114 if self.quitout.poll():
115 self.quitout.recv()
116 self.quit = True
117
118 self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
119 except Exception:
120 logger.exception('Running command %s', command)
121
122 self.event_queue.close()
123 bb.event.unregister_UIHhandler(self.event_handle.value)
124 self.command_channel.close()
125 self.cooker.shutdown(True)
126
127 def idle_commands(self, delay, fds = []):
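# Registered idle callbacks communicate through their return value:
#   False   -> finished, remove the callback
#   True    -> more work pending, poll again without sleeping
#   float   -> sleep at most this many seconds before the next poll
#   other   -> treated as extra file descriptors to watch in the select() below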
128 nextsleep = delay
129
130 for function, data in self._idlefuns.items():
131 try:
132 retval = function(self, data, False)
133 if retval is False:
134 del self._idlefuns[function]
135 nextsleep = None
136 elif retval is True:
137 nextsleep = None
138 elif isinstance(retval, float):
139 if (retval < nextsleep):
140 nextsleep = retval
141 elif nextsleep is None:
142 continue
143 else:
144 fds = fds + retval
145 except SystemExit:
146 raise
147 except Exception:
148 logger.exception('Running idle function')
149 del self._idlefuns[function]
150 self.quit = True
151
152 if nextsleep is not None:
153 select.select(fds,[],[],nextsleep)
154
155 def runCommand(self, command):
156 """
157 Run a cooker command on the server
158 """
159 self.command_channel.send(self.cooker.command.runCommand(command))
160
161 def stop(self):
162 self.quitin.send("quit")
163 self.quitin.close()
164
165class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
166 def __init__(self, serverImpl, ui_channel, event_queue):
167 self.procserver = serverImpl
168 self.ui_channel = ui_channel
169 self.event_queue = event_queue
170 self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
171 self.events = self.event_queue
172
173 def sigterm_terminate(self):
174 bb.error("UI received SIGTERM")
175 self.terminate()
176
177 def terminate(self):
178 def flushevents():
179 while True:
180 try:
181 event = self.event_queue.get(block=False)
182 except (Empty, IOError):
183 break
184 if isinstance(event, logging.LogRecord):
185 logger.handle(event)
186
187 signal.signal(signal.SIGINT, signal.SIG_IGN)
188 self.procserver.stop()
189
190 while self.procserver.is_alive():
191 flushevents()
192 self.procserver.join(0.1)
193
194 self.ui_channel.close()
195 self.event_queue.close()
196 self.event_queue.setexit()
197
198# Wrap Queue to provide an API which isn't server-implementation specific
199class ProcessEventQueue(multiprocessing.queues.Queue):
200 def __init__(self, maxsize):
201 multiprocessing.queues.Queue.__init__(self, maxsize)
202 self.exit = False
203
204 def setexit(self):
205 self.exit = True
206
207 def waitEvent(self, timeout):
208 if self.exit:
209 sys.exit(1)
210 try:
211 if not self.server.is_alive():
212 self.setexit()
213 return None
214 return self.get(True, timeout)
215 except Empty:
216 return None
217
218 def getEvent(self):
219 try:
220 if not self.server.is_alive():
221 self.setexit()
222 return None
223 return self.get(False)
224 except Empty:
225 return None
226
227
228class BitBakeServer(BitBakeBaseServer):
229 def initServer(self):
230 # establish communication channels. We use bidirectional pipes for
231 # ui <--> server command/response pairs
232 # and a queue for server -> ui event notifications
233 #
234 self.ui_channel, self.server_channel = Pipe()
235 self.event_queue = ProcessEventQueue(0)
236 self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
237 self.event_queue.server = self.serverImpl
238
239 def detach(self):
240 self.serverImpl.start()
241 return
242
243 def establishConnection(self, featureset):
244
245 self.connection = BitBakeProcessServerConnection(self.serverImpl, self.ui_channel, self.event_queue)
246
247 _, error = self.connection.connection.runCommand(["setFeatures", featureset])
248 if error:
249 logger.error("Unable to set the cooker to the correct featureset: %s" % error)
250 raise BaseException(error)
251 signal.signal(signal.SIGTERM, lambda i, s: self.connection.sigterm_terminate())
252 return self.connection
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
new file mode 100644
index 0000000000..10d4b5c779
--- /dev/null
+++ b/bitbake/lib/bb/server/xmlrpc.py
@@ -0,0 +1,383 @@
1#
2# BitBake XMLRPC Server
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2008 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20"""
21 This module implements an xmlrpc server for BitBake.
22
23 Use this by deriving a class from BitBakeXMLRPCServer and then adding
24 methods which you want to "export" via XMLRPC. If the methods have the
25 prefix xmlrpc_, those functions will be registered automatically;
26 if not, you need to call register_function().
27
28 Use register_idle_function() to add a function which the xmlrpc server
29 calls from within serve_forever() when no requests are pending. Make sure
30 that those functions are non-blocking or else you will introduce latency
31 in the server's main loop.
32"""
33
34import bb
35import xmlrpclib, sys
36from bb import daemonize
37from bb.ui import uievent
38import hashlib, time
39import socket
40import os, signal
41import threading
42try:
43 import cPickle as pickle
44except ImportError:
45 import pickle
46
47DEBUG = False
48
49from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
50import inspect, select, httplib
51
52from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
53
54class BBTransport(xmlrpclib.Transport):
55 def __init__(self, timeout):
56 self.timeout = timeout
57 self.connection_token = None
58 xmlrpclib.Transport.__init__(self)
59
60 # Modified from default to pass timeout to HTTPConnection
61 def make_connection(self, host):
62 #return an existing connection if possible. This allows
63 #HTTP/1.1 keep-alive.
64 if self._connection and host == self._connection[0]:
65 return self._connection[1]
66
67 # create a HTTP connection object from a host descriptor
68 chost, self._extra_headers, x509 = self.get_host_info(host)
69 #store the host argument along with the connection object
70 self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
71 return self._connection[1]
72
73 def set_connection_token(self, token):
74 self.connection_token = token
75
76 def send_content(self, h, body):
77 if self.connection_token:
78 h.putheader("Bitbake-token", self.connection_token)
79 xmlrpclib.Transport.send_content(self, h, body)
80
81def _create_server(host, port, timeout = 60):
82 t = BBTransport(timeout)
83 s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
84 return s, t
85
86class BitBakeServerCommands():
87
88 def __init__(self, server):
89 self.server = server
90 self.has_client = False
91
92 def registerEventHandler(self, host, port):
93 """
94 Register a remote UI Event Handler
95 """
96 s, t = _create_server(host, port)
97
98 # we don't allow connections if the cooker is running
99 if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
100 return None
101
102 self.event_handle = bb.event.register_UIHhandler(s)
103 return self.event_handle
104
105 def unregisterEventHandler(self, handlerNum):
106 """
107 Unregister a remote UI Event Handler
108 """
109 return bb.event.unregister_UIHhandler(handlerNum)
110
111 def runCommand(self, command):
112 """
113 Run a cooker command on the server
114 """
115 return self.cooker.command.runCommand(command, self.server.readonly)
116
117 def getEventHandle(self):
118 return self.event_handle
119
120 def terminateServer(self):
121 """
122 Trigger the server to quit
123 """
124 self.server.quit = True
125 print("Server (cooker) exiting")
126 return
127
128 def addClient(self):
129 if self.has_client:
130 return None
131 token = hashlib.md5(str(time.time())).hexdigest()
132 self.server.set_connection_token(token)
133 self.has_client = True
134 return token
135
136 def removeClient(self):
137 if self.has_client:
138 self.server.set_connection_token(None)
139 self.has_client = False
140 if self.server.single_use:
141 self.server.quit = True
142
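# Example (editor's sketch): the single-client handshake implemented by addClient()
# and removeClient() above, seen from a client using the proxy returned by
# _create_server() (host/port are illustrative).
#
#     proxy, transport = _create_server("localhost", 8173)
#     token = proxy.addClient()                 # None if another client is already connected
#     if token is not None:
#         transport.set_connection_token(token) # sent with each request as the "Bitbake-token" header
#         ...
#         proxy.removeClient()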
143# This request handler checks if the request has a "Bitbake-token" header
144# field (this comes from the client side) and compares it with its internal
145# "Bitbake-token" field (this comes from the server). If the two are not
146# equal, it is assumed that a client is trying to connect to the server
147# while another client is connected to the server. In this case, a 503 error
148# ("service unavailable") is returned to the client.
149class BitBakeXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
150 def __init__(self, request, client_address, server):
151 self.server = server
152 SimpleXMLRPCRequestHandler.__init__(self, request, client_address, server)
153
154 def do_POST(self):
155 try:
156 remote_token = self.headers["Bitbake-token"]
157 except:
158 remote_token = None
159 if remote_token != self.server.connection_token and remote_token != "observer":
160 self.report_503()
161 else:
162 if remote_token == "observer":
163 self.server.readonly = True
164 else:
165 self.server.readonly = False
166 SimpleXMLRPCRequestHandler.do_POST(self)
167
168 def report_503(self):
169 self.send_response(503)
170        response = 'No more clients allowed'
171 self.send_header("Content-type", "text/plain")
172 self.send_header("Content-length", str(len(response)))
173 self.end_headers()
174 self.wfile.write(response)
175
176
177class XMLRPCProxyServer(BaseImplServer):
178    """Not a real working server, but a stub for a proxy server connection."""
181 def __init__(self, host, port):
182 self.host = host
183 self.port = port
184
185class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
186 # remove this when you're done with debugging
187 # allow_reuse_address = True
188
189 def __init__(self, interface):
190 """
191 Constructor
192 """
193 BaseImplServer.__init__(self)
194        # anonymous ports (port 0) are not reused, so such a server is single use
195        self.single_use = (interface[1] == 0)
196 # Use auto port configuration
197 if (interface[1] == -1):
198 interface = (interface[0], 0)
199 SimpleXMLRPCServer.__init__(self, interface,
200 requestHandler=BitBakeXMLRPCRequestHandler,
201 logRequests=False, allow_none=True)
202 self.host, self.port = self.socket.getsockname()
203 self.connection_token = None
204 #self.register_introspection_functions()
205 self.commands = BitBakeServerCommands(self)
206 self.autoregister_all_functions(self.commands, "")
207 self.interface = interface
209
210 def addcooker(self, cooker):
211 BaseImplServer.addcooker(self, cooker)
212 self.commands.cooker = cooker
213
214 def autoregister_all_functions(self, context, prefix):
215 """
216 Convenience method for registering all functions in the scope
217 of this class that start with a common prefix
218 """
219 methodlist = inspect.getmembers(context, inspect.ismethod)
220 for name, method in methodlist:
221 if name.startswith(prefix):
222 self.register_function(method, name[len(prefix):])
223
224
225 def serve_forever(self):
226 # Start the actual XMLRPC server
227 bb.cooker.server_main(self.cooker, self._serve_forever)
228
229 def _serve_forever(self):
230 """
231 Serve Requests. Overloaded to honor a quit command
232 """
233 self.quit = False
234 while not self.quit:
235 fds = [self]
236 nextsleep = 0.1
237 for function, data in self._idlefuns.items():
238 try:
239 retval = function(self, data, False)
240 if retval is False:
241 del self._idlefuns[function]
242 elif retval is True:
243 nextsleep = 0
244 elif isinstance(retval, float):
245 if (retval < nextsleep):
246 nextsleep = retval
247 else:
248 fds = fds + retval
249 except SystemExit:
250 raise
251 except:
252 import traceback
253 traceback.print_exc()
254 pass
255
256 socktimeout = self.socket.gettimeout() or nextsleep
257 socktimeout = min(socktimeout, nextsleep)
258 # Mirror what BaseServer handle_request would do
259 try:
260 fd_sets = select.select(fds, [], [], socktimeout)
261 if fd_sets[0] and self in fd_sets[0]:
262 self._handle_request_noblock()
263 except IOError:
264 # we ignore interrupted calls
265 pass
266
267 # Tell idle functions we're exiting
268 for function, data in self._idlefuns.items():
269 try:
270 retval = function(self, data, True)
271 except:
272 pass
273 self.server_close()
274 return
275
276 def set_connection_token(self, token):
277 self.connection_token = token
278
279class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
280 def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = []):
281 self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
282 self.clientinfo = clientinfo
283 self.serverImpl = serverImpl
284 self.observer_only = observer_only
285 self.featureset = featureset
286
287 def connect(self, token = None):
288 if token is None:
289 if self.observer_only:
290 token = "observer"
291 else:
292 token = self.connection.addClient()
293
294 if token is None:
295 return None
296
297 self.transport.set_connection_token(token)
298
299 self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
300 for event in bb.event.ui_queue:
301 self.events.queue_event(event)
302
303 _, error = self.connection.runCommand(["setFeatures", self.featureset])
304 if error:
305 # no need to log it here, the error shall be sent to the client
306 raise BaseException(error)
307
308 return self
309
310 def removeClient(self):
311 if not self.observer_only:
312 self.connection.removeClient()
313
314 def terminate(self):
315 # Don't wait for server indefinitely
316 import socket
317 socket.setdefaulttimeout(2)
318 try:
319 self.events.system_quit()
320 except:
321 pass
322 try:
323 self.connection.removeClient()
324 except:
325 pass
326
327class BitBakeServer(BitBakeBaseServer):
328 def initServer(self, interface = ("localhost", 0)):
329 self.interface = interface
330 self.serverImpl = XMLRPCServer(interface)
331
332 def detach(self):
333 daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
334 del self.cooker
335
336 def establishConnection(self, featureset):
337 self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, self.interface, False, featureset)
338 return self.connection.connect()
339
340 def set_connection_token(self, token):
341 self.connection.transport.set_connection_token(token)
342
343class BitBakeXMLRPCClient(BitBakeBaseServer):
344
345 def __init__(self, observer_only = False, token = None):
346 self.token = token
347
348 self.observer_only = observer_only
349 # if we need extra caches, just tell the server to load them all
350 pass
351
352 def saveConnectionDetails(self, remote):
353 self.remote = remote
354
355 def establishConnection(self, featureset):
356 # The format of "remote" must be "server:port"
357 try:
358 [host, port] = self.remote.split(":")
359 port = int(port)
360 except Exception as e:
361 bb.warn("Failed to read remote definition (%s)" % str(e))
362 raise e
363
364 # We need our IP for the server connection. We get the IP
365 # by trying to connect with the server
366 try:
367 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
368 s.connect((host, port))
369 ip = s.getsockname()[0]
370 s.close()
371 except Exception as e:
372 bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
373 raise e
374 try:
375 self.serverImpl = XMLRPCProxyServer(host, port)
376 self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
377 return self.connection.connect(self.token)
378 except Exception as e:
379 bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
380 raise e
381
382 def endSession(self):
383 self.connection.removeClient()
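# Example (editor's sketch): connecting to an already-running server as an observer,
# assuming a "host:port" string and an empty feature set (address is illustrative).
#
#     client = BitBakeXMLRPCClient(observer_only=True)
#     client.saveConnectionDetails("127.0.0.1:8173")
#     connection = client.establishConnection([])
#     ...
#     client.endSession()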
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
new file mode 100644
index 0000000000..1dd8d54bdb
--- /dev/null
+++ b/bitbake/lib/bb/shell.py
@@ -0,0 +1,820 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3##########################################################################
4#
5# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
6# Copyright (C) 2005-2006 Vanille Media
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21##########################################################################
22#
23# Thanks to:
24# * Holger Freyther <zecke@handhelds.org>
25# * Justin Patrin <papercrane@reversefold.com>
26#
27##########################################################################
28
29"""
30BitBake Shell
31
32IDEAS:
33 * list defined tasks per package
34 * list classes
35 * toggle force
36 * command to reparse just one (or more) bbfile(s)
37 * automatic check if reparsing is necessary (inotify?)
38 * frontend for bb file manipulation
39 * more shell-like features:
40 - output control, i.e. pipe output into grep, sort, etc.
41 - job control, i.e. bring running commands into background and foreground
42 * start parsing in background right after startup
43 * ncurses interface
44
45PROBLEMS:
46 * force doesn't always work
47 * readline completion for commands with more than one parameter
48
49"""
50
51##########################################################################
52# Import and setup global variables
53##########################################################################
54
55from __future__ import print_function
56from functools import reduce
57try:
58 set
59except NameError:
60 from sets import Set as set
61import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
62from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
63
64__version__ = "0.5.3.1"
65__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
66Type 'help' for more information, press CTRL-D to exit.""" % __version__
67
68cmds = {}
69leave_mainloop = False
70last_exception = None
71cooker = None
72parsed = False
73debug = os.environ.get( "BBSHELL_DEBUG", "" )
74
75##########################################################################
76# Class BitBakeShellCommands
77##########################################################################
78
79class BitBakeShellCommands:
80 """This class contains the valid commands for the shell"""
81
82 def __init__( self, shell ):
83 """Register all the commands"""
84 self._shell = shell
85 for attr in BitBakeShellCommands.__dict__:
86 if not attr.startswith( "_" ):
87 if attr.endswith( "_" ):
88 command = attr[:-1].lower()
89 else:
90 command = attr[:].lower()
91 method = getattr( BitBakeShellCommands, attr )
92 debugOut( "registering command '%s'" % command )
93 # scan number of arguments
94 usage = getattr( method, "usage", "" )
95 if usage != "<...>":
96 numArgs = len( usage.split() )
97 else:
98 numArgs = -1
99 shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
100
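    # Example (editor's sketch): a hypothetical command picked up by the loop above.
    # A trailing "_" is stripped from the attribute name (cf. exit_), and the usage
    # attribute determines the number of expected parameters.
    #
    #     def hello( self, params ):
    #         """Print a greeting for a providee"""
    #         print("SHELL: Hello %s" % params[0])
    #     hello.usage = "<providee>"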
101 def _checkParsed( self ):
102 if not parsed:
103 print("SHELL: This command needs to parse bbfiles...")
104 self.parse( None )
105
106 def _findProvider( self, item ):
107 self._checkParsed()
108 # Need to use taskData for this information
109 preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
110 if not preferred: preferred = item
111 try:
112 lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
113 except KeyError:
114 if item in cooker.status.providers:
115 pf = cooker.status.providers[item][0]
116 else:
117 pf = None
118 return pf
119
120 def alias( self, params ):
121 """Register a new name for a command"""
122 new, old = params
123 if not old in cmds:
124 print("ERROR: Command '%s' not known" % old)
125 else:
126 cmds[new] = cmds[old]
127 print("OK")
128 alias.usage = "<alias> <command>"
129
130 def buffer( self, params ):
131 """Dump specified output buffer"""
132 index = params[0]
133 print(self._shell.myout.buffer( int( index ) ))
134 buffer.usage = "<index>"
135
136 def buffers( self, params ):
137 """Show the available output buffers"""
138 commands = self._shell.myout.bufferedCommands()
139 if not commands:
140 print("SHELL: No buffered commands available yet. Start doing something.")
141 else:
142 print("="*35, "Available Output Buffers", "="*27)
143 for index, cmd in enumerate( commands ):
144 print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
145 print("="*88)
146
147 def build( self, params, cmd = "build" ):
148 """Build a providee"""
149 global last_exception
150 globexpr = params[0]
151 self._checkParsed()
152 names = globfilter( cooker.status.pkg_pn, globexpr )
153 if len( names ) == 0: names = [ globexpr ]
154 print("SHELL: Building %s" % ' '.join( names ))
155
156 td = taskdata.TaskData(cooker.configuration.abort)
157 localdata = data.createCopy(cooker.configuration.data)
158 data.update_data(localdata)
159 data.expandKeys(localdata)
160
161 try:
162 tasks = []
163 for name in names:
164 td.add_provider(localdata, cooker.status, name)
165 providers = td.get_provider(name)
166
167 if len(providers) == 0:
168 raise Providers.NoProvider
169
170 tasks.append([name, "do_%s" % cmd])
171
172 td.add_unresolved(localdata, cooker.status)
173
174 rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
175 rq.prepare_runqueue()
176 rq.execute_runqueue()
177
178 except Providers.NoProvider:
179 print("ERROR: No Provider")
180 last_exception = Providers.NoProvider
181
182 except runqueue.TaskFailure as fnids:
183 last_exception = runqueue.TaskFailure
184
185 except build.FuncFailed as e:
186 print("ERROR: Couldn't build '%s'" % names)
187 last_exception = e
188
189
190 build.usage = "<providee>"
191
192 def clean( self, params ):
193 """Clean a providee"""
194 self.build( params, "clean" )
195 clean.usage = "<providee>"
196
197 def compile( self, params ):
198 """Execute 'compile' on a providee"""
199 self.build( params, "compile" )
200 compile.usage = "<providee>"
201
202 def configure( self, params ):
203 """Execute 'configure' on a providee"""
204 self.build( params, "configure" )
205 configure.usage = "<providee>"
206
207 def install( self, params ):
208 """Execute 'install' on a providee"""
209 self.build( params, "install" )
210 install.usage = "<providee>"
211
212 def edit( self, params ):
213 """Call $EDITOR on a providee"""
214 name = params[0]
215 bbfile = self._findProvider( name )
216 if bbfile is not None:
217 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
218 else:
219 print("ERROR: Nothing provides '%s'" % name)
220 edit.usage = "<providee>"
221
222 def environment( self, params ):
223 """Dump out the outer BitBake environment"""
224 cooker.showEnvironment()
225
226 def exit_( self, params ):
227 """Leave the BitBake Shell"""
228 debugOut( "setting leave_mainloop to true" )
229 global leave_mainloop
230 leave_mainloop = True
231
232 def fetch( self, params ):
233 """Fetch a providee"""
234 self.build( params, "fetch" )
235 fetch.usage = "<providee>"
236
237 def fileBuild( self, params, cmd = "build" ):
238 """Parse and build a .bb file"""
239 global last_exception
240 name = params[0]
241 bf = completeFilePath( name )
242 print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
243
244 try:
245 cooker.buildFile(bf, cmd)
246 except parse.ParseError:
247 print("ERROR: Unable to open or parse '%s'" % bf)
248 except build.FuncFailed as e:
249 print("ERROR: Couldn't build '%s'" % name)
250 last_exception = e
251
252 fileBuild.usage = "<bbfile>"
253
254 def fileClean( self, params ):
255 """Clean a .bb file"""
256 self.fileBuild( params, "clean" )
257 fileClean.usage = "<bbfile>"
258
259 def fileEdit( self, params ):
260 """Call $EDITOR on a .bb file"""
261 name = params[0]
262 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
263 fileEdit.usage = "<bbfile>"
264
265 def fileRebuild( self, params ):
266 """Rebuild (clean & build) a .bb file"""
267 self.fileBuild( params, "rebuild" )
268 fileRebuild.usage = "<bbfile>"
269
270 def fileReparse( self, params ):
271 """(re)Parse a bb file"""
272 bbfile = params[0]
273 print("SHELL: Parsing '%s'" % bbfile)
274 parse.update_mtime( bbfile )
275 cooker.parser.reparse(bbfile)
276 if False: #fromCache:
277 print("SHELL: File has not been updated, not reparsing")
278 else:
279 print("SHELL: Parsed")
280 fileReparse.usage = "<bbfile>"
281
282 def abort( self, params ):
283 """Toggle abort task execution flag (see bitbake -k)"""
284 cooker.configuration.abort = not cooker.configuration.abort
285 print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
286
287 def force( self, params ):
288 """Toggle force task execution flag (see bitbake -f)"""
289 cooker.configuration.force = not cooker.configuration.force
290 print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
291
292 def help( self, params ):
293 """Show a comprehensive list of commands and their purpose"""
294 print("="*30, "Available Commands", "="*30)
295 for cmd in sorted(cmds):
296 function, numparams, usage, helptext = cmds[cmd]
297 print("| %s | %s" % (usage.ljust(30), helptext))
298 print("="*78)
299
300 def lastError( self, params ):
301 """Show the reason or log that was produced by the last BitBake event exception"""
302 if last_exception is None:
303 print("SHELL: No Errors yet (Phew)...")
304 else:
305 reason, event = last_exception.args
306 print("SHELL: Reason for the last error: '%s'" % reason)
307 if ':' in reason:
308 msg, filename = reason.split( ':' )
309 filename = filename.strip()
310 print("SHELL: Dumping log file for last error:")
311 try:
312 print(open( filename ).read())
313 except IOError:
314 print("ERROR: Couldn't open '%s'" % filename)
315
316 def match( self, params ):
317 """Dump all files or providers matching a glob expression"""
318 what, globexpr = params
319 if what == "files":
320 self._checkParsed()
321 for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
322 elif what == "providers":
323 self._checkParsed()
324 for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
325 else:
326 print("Usage: match %s" % self.print_.usage)
327 match.usage = "<files|providers> <glob>"
328
329 def new( self, params ):
330 """Create a new .bb file and open the editor"""
331 dirname, filename = params
332 packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
333 fulldirname = "%s/%s" % ( packages, dirname )
334
335 if not os.path.exists( fulldirname ):
336 print("SHELL: Creating '%s'" % fulldirname)
337 os.mkdir( fulldirname )
338 if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
339 if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
340 print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
341 return False
342 print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
343 newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
344 print("""DESCRIPTION = ""
345SECTION = ""
346AUTHOR = ""
347HOMEPAGE = ""
348MAINTAINER = ""
349LICENSE = "GPL"
350PR = "r0"
351
352SRC_URI = ""
353
354#inherit base
355
356#do_configure() {
357#
358#}
359
360#do_compile() {
361#
362#}
363
364#do_stage() {
365#
366#}
367
368#do_install() {
369#
370#}
371""", file=newpackage)
372 newpackage.close()
373 os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
374 new.usage = "<directory> <filename>"
375
376 def package( self, params ):
377 """Execute 'package' on a providee"""
378 self.build( params, "package" )
379 package.usage = "<providee>"
380
381 def pasteBin( self, params ):
382 """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
383 index = params[0]
384 contents = self._shell.myout.buffer( int( index ) )
385 sendToPastebin( "output of " + params[0], contents )
386 pasteBin.usage = "<index>"
387
388 def pasteLog( self, params ):
389 """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
390 if last_exception is None:
391 print("SHELL: No Errors yet (Phew)...")
392 else:
393 reason, event = last_exception.args
394 print("SHELL: Reason for the last error: '%s'" % reason)
395 if ':' in reason:
396 msg, filename = reason.split( ':' )
397 filename = filename.strip()
398 print("SHELL: Pasting log file to pastebin...")
399
400 file = open( filename ).read()
401 sendToPastebin( "contents of " + filename, file )
402
403 def patch( self, params ):
404 """Execute 'patch' command on a providee"""
405 self.build( params, "patch" )
406 patch.usage = "<providee>"
407
408 def parse( self, params ):
409 """(Re-)parse .bb files and calculate the dependency graph"""
410 cooker.status = cache.CacheData(cooker.caches_array)
411 ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
412 cooker.status.ignored_dependencies = set( ignore.split() )
413 cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
414
415 (filelist, masked) = cooker.collect_bbfiles()
416 cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
417 cooker.buildDepgraph()
418 global parsed
419 parsed = True
420 print()
421
422 def reparse( self, params ):
423 """(re)Parse a providee's bb file"""
424 bbfile = self._findProvider( params[0] )
425 if bbfile is not None:
426 print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
427 self.fileReparse( [ bbfile ] )
428 else:
429 print("ERROR: Nothing provides '%s'" % params[0])
430 reparse.usage = "<providee>"
431
432 def getvar( self, params ):
433 """Dump the contents of an outer BitBake environment variable"""
434 var = params[0]
435 value = data.getVar( var, cooker.configuration.data, 1 )
436 print(value)
437 getvar.usage = "<variable>"
438
439 def peek( self, params ):
440 """Dump contents of variable defined in providee's metadata"""
441 name, var = params
442 bbfile = self._findProvider( name )
443 if bbfile is not None:
444 the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
445 value = the_data.getVar( var, 1 )
446 print(value)
447 else:
448 print("ERROR: Nothing provides '%s'" % name)
449 peek.usage = "<providee> <variable>"
450
451 def poke( self, params ):
452 """Set contents of variable defined in providee's metadata"""
453 name, var, value = params
454 bbfile = self._findProvider( name )
455 if bbfile is not None:
456 print("ERROR: Sorry, this functionality is currently broken")
457 #d = cooker.pkgdata[bbfile]
458 #data.setVar( var, value, d )
459
460            # mark the change semi-persistent
461 #cooker.pkgdata.setDirty(bbfile, d)
462 #print "OK"
463 else:
464 print("ERROR: Nothing provides '%s'" % name)
465 poke.usage = "<providee> <variable> <value>"
466
467 def print_( self, params ):
468 """Dump all files or providers"""
469 what = params[0]
470 if what == "files":
471 self._checkParsed()
472 for key in cooker.status.pkg_fn: print(key)
473 elif what == "providers":
474 self._checkParsed()
475 for key in cooker.status.providers: print(key)
476 else:
477 print("Usage: print %s" % self.print_.usage)
478 print_.usage = "<files|providers>"
479
480 def python( self, params ):
481 """Enter the expert mode - an interactive BitBake Python Interpreter"""
482 sys.ps1 = "EXPERT BB>>> "
483 sys.ps2 = "EXPERT BB... "
484 import code
485 interpreter = code.InteractiveConsole( dict( globals() ) )
486 interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
487
488 def showdata( self, params ):
489 """Execute 'showdata' on a providee"""
490 cooker.showEnvironment(None, params)
491 showdata.usage = "<providee>"
492
493 def setVar( self, params ):
494 """Set an outer BitBake environment variable"""
495 var, value = params
496 data.setVar( var, value, cooker.configuration.data )
497 print("OK")
498 setVar.usage = "<variable> <value>"
499
500 def rebuild( self, params ):
501 """Clean and rebuild a .bb file or a providee"""
502 self.build( params, "clean" )
503 self.build( params, "build" )
504 rebuild.usage = "<providee>"
505
506 def shell( self, params ):
507 """Execute a shell command and dump the output"""
508 if params != "":
509 print(commands.getoutput( " ".join( params ) ))
510 shell.usage = "<...>"
511
512 def stage( self, params ):
513 """Execute 'stage' on a providee"""
514 self.build( params, "populate_staging" )
515 stage.usage = "<providee>"
516
517 def status( self, params ):
518 """<just for testing>"""
519 print("-" * 78)
520 print("building list = '%s'" % cooker.building_list)
521 print("build path = '%s'" % cooker.build_path)
522 print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
523 print("build stats = '%s'" % cooker.stats)
524 if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
525 print("memory output contents = '%s'" % self._shell.myout._buffer)
526
527 def test( self, params ):
528 """<just for testing>"""
529 print("testCommand called with '%s'" % params)
530
531 def unpack( self, params ):
532 """Execute 'unpack' on a providee"""
533 self.build( params, "unpack" )
534 unpack.usage = "<providee>"
535
536 def which( self, params ):
537        """Compute the providers for a given providee"""
538 # Need to use taskData for this information
539 item = params[0]
540
541 self._checkParsed()
542
543 preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
544 if not preferred: preferred = item
545
546 try:
547 lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
548 except KeyError:
549 lv, lf, pv, pf = (None,)*4
550
551 try:
552 providers = cooker.status.providers[item]
553 except KeyError:
554 print("SHELL: ERROR: Nothing provides", preferred)
555 else:
556 for provider in providers:
557 if provider == pf: provider = " (***) %s" % provider
558 else: provider = " %s" % provider
559 print(provider)
560 which.usage = "<providee>"
561
562##########################################################################
563# Common helper functions
564##########################################################################
565
566def completeFilePath( bbfile ):
567 """Get the complete bbfile path"""
568 if not cooker.status: return bbfile
569 if not cooker.status.pkg_fn: return bbfile
570 for key in cooker.status.pkg_fn:
571 if key.endswith( bbfile ):
572 return key
573 return bbfile
574
575def sendToPastebin( desc, content ):
576    """Send content to the pastebin at http://rafb.net/paste"""
577 mydata = {}
578 mydata["lang"] = "Plain Text"
579 mydata["desc"] = desc
580 mydata["cvt_tabs"] = "No"
581 mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
582 mydata["text"] = content
583 params = urllib.urlencode( mydata )
584 headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
585
586 host = "rafb.net"
587 conn = httplib.HTTPConnection( "%s:80" % host )
588 conn.request("POST", "/paste/paste.php", params, headers )
589
590 response = conn.getresponse()
591 conn.close()
592
593 if response.status == 302:
594 location = response.getheader( "location" ) or "unknown"
595 print("SHELL: Pasted to http://%s%s" % ( host, location ))
596 else:
597 print("ERROR: %s %s" % ( response.status, response.reason ))
598
599def completer( text, state ):
600 """Return a possible readline completion"""
601 debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
602
603 if state == 0:
604 line = readline.get_line_buffer()
605 if " " in line:
606 line = line.split()
607 # we are in second (or more) argument
608 if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
609 u = getattr( cmds[line[0]][0], "usage" ).split()[0]
610 if u == "<variable>":
611 allmatches = cooker.configuration.data.keys()
612 elif u == "<bbfile>":
613 if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
614 else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
615 elif u == "<providee>":
616 if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
617 else: allmatches = cooker.status.providers.iterkeys()
618 else: allmatches = [ "(No tab completion available for this command)" ]
619 else: allmatches = [ "(No tab completion available for this command)" ]
620 else:
621 # we are in first argument
622 allmatches = cmds.iterkeys()
623
624 completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
625 #print "completer.matches = '%s'" % completer.matches
626 if len( completer.matches ) > state:
627 return completer.matches[state]
628 else:
629 return None
630
631def debugOut( text ):
632 if debug:
633 sys.stderr.write( "( %s )\n" % text )
634
635def columnize( alist, width = 80 ):
636 """
637 A word-wrap function that preserves existing line breaks
638 and most spaces in the text. Expects that existing line
639 breaks are posix newlines (\n).
640 """
641 return reduce(lambda line, word, width=width: '%s%s%s' %
642 (line,
643 ' \n'[(len(line[line.rfind('\n')+1:])
644 + len(word.split('\n', 1)[0]
645 ) >= width)],
646 word),
647 alist
648 )
649
650def globfilter( names, pattern ):
651 return fnmatch.filter( names, pattern )
652
653##########################################################################
654# Class MemoryOutput
655##########################################################################
656
657class MemoryOutput:
658 """File-like output class buffering the output of the last 10 commands"""
659 def __init__( self, delegate ):
660 self.delegate = delegate
661 self._buffer = []
662 self.text = []
663 self._command = None
664
665 def startCommand( self, command ):
666 self._command = command
667 self.text = []
668 def endCommand( self ):
669 if self._command is not None:
670 if len( self._buffer ) == 10: del self._buffer[0]
671 self._buffer.append( ( self._command, self.text ) )
672 def removeLast( self ):
673 if self._buffer:
674 del self._buffer[ len( self._buffer ) - 1 ]
675 self.text = []
676 self._command = None
677 def lastBuffer( self ):
678 if self._buffer:
679 return self._buffer[ len( self._buffer ) -1 ][1]
680 def bufferedCommands( self ):
681 return [ cmd for cmd, output in self._buffer ]
682 def buffer( self, i ):
683 if i < len( self._buffer ):
684 return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
685        else: return "ERROR: Invalid buffer number. Buffer needs to be in [0, %d]" % ( len( self._buffer ) - 1 )
686 def write( self, text ):
687 if self._command is not None and text != "BB>> ": self.text.append( text )
688 if self.delegate is not None: self.delegate.write( text )
689 def flush( self ):
690 return self.delegate.flush()
691 def fileno( self ):
692 return self.delegate.fileno()
693 def isatty( self ):
694 return self.delegate.isatty()
695
696##########################################################################
697# Class BitBakeShell
698##########################################################################
699
700class BitBakeShell:
701
702 def __init__( self ):
703 """Register commands and set up readline"""
704 self.commandQ = Queue.Queue()
705 self.commands = BitBakeShellCommands( self )
706 self.myout = MemoryOutput( sys.stdout )
707 self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
708 self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
709
710 readline.set_completer( completer )
711 readline.set_completer_delims( " " )
712 readline.parse_and_bind("tab: complete")
713
714 try:
715 readline.read_history_file( self.historyfilename )
716 except IOError:
717 pass # It doesn't exist yet.
718
719 print(__credits__)
720
721 def cleanup( self ):
722 """Write readline history and clean up resources"""
723 debugOut( "writing command history" )
724 try:
725 readline.write_history_file( self.historyfilename )
726 except:
727 print("SHELL: Unable to save command history")
728
729 def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
730 """Register a command"""
731 if usage == "": usage = command
732 if helptext == "": helptext = function.__doc__ or "<not yet documented>"
733 cmds[command] = ( function, numparams, usage, helptext )
734
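    # Example (editor's sketch): after registration the entry for the build command
    # looks like
    #     cmds["build"] == ( BitBakeShellCommands.build, 1, "build <providee>", "Build a providee" )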
735 def processCommand( self, command, params ):
736 """Process a command. Check number of params and print a usage string, if appropriate"""
737 debugOut( "processing command '%s'..." % command )
738 try:
739 function, numparams, usage, helptext = cmds[command]
740 except KeyError:
741            print("SHELL: ERROR: '%s' is not a valid command." % command)
742 self.myout.removeLast()
743 else:
744 if (numparams != -1) and (not len( params ) == numparams):
745 print("Usage: '%s'" % usage)
746 return
747
748 result = function( self.commands, params )
749 debugOut( "result was '%s'" % result )
750
751 def processStartupFile( self ):
752 """Read and execute all commands found in $HOME/.bbsh_startup"""
753 if os.path.exists( self.startupfilename ):
754 startupfile = open( self.startupfilename, "r" )
755 for cmdline in startupfile:
756 debugOut( "processing startup line '%s'" % cmdline )
757 if not cmdline:
758 continue
759 if "|" in cmdline:
760 print("ERROR: '|' in startup file is not allowed. Ignoring line")
761 continue
762 self.commandQ.put( cmdline.strip() )
763
764 def main( self ):
765 """The main command loop"""
766 while not leave_mainloop:
767 try:
768 if self.commandQ.empty():
769 sys.stdout = self.myout.delegate
770 cmdline = raw_input( "BB>> " )
771 sys.stdout = self.myout
772 else:
773 cmdline = self.commandQ.get()
774 if cmdline:
775 allCommands = cmdline.split( ';' )
776 for command in allCommands:
777 pipecmd = None
778 #
779 # special case for expert mode
780 if command == 'python':
781 sys.stdout = self.myout.delegate
782 self.processCommand( command, "" )
783 sys.stdout = self.myout
784 else:
785 self.myout.startCommand( command )
786 if '|' in command: # disable output
787 command, pipecmd = command.split( '|' )
788 delegate = self.myout.delegate
789 self.myout.delegate = None
790 tokens = shlex.split( command, True )
791 self.processCommand( tokens[0], tokens[1:] or "" )
792 self.myout.endCommand()
793 if pipecmd is not None: # restore output
794 self.myout.delegate = delegate
795
796 pipe = popen2.Popen4( pipecmd )
797 pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
798 pipe.tochild.close()
799 sys.stdout.write( pipe.fromchild.read() )
800 #
801 except EOFError:
802 print()
803 return
804 except KeyboardInterrupt:
805 print()
806
807##########################################################################
808# Start function - called from the BitBake command line utility
809##########################################################################
810
811def start( aCooker ):
812 global cooker
813 cooker = aCooker
814 bbshell = BitBakeShell()
815 bbshell.processStartupFile()
816 bbshell.main()
817 bbshell.cleanup()
818
819if __name__ == "__main__":
820 print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
new file mode 100644
index 0000000000..e77be6abfb
--- /dev/null
+++ b/bitbake/lib/bb/siggen.py
@@ -0,0 +1,486 @@
1import hashlib
2import logging
3import os
4import re
5import tempfile
6import bb.data
7
8logger = logging.getLogger('BitBake.SigGen')
9
10try:
11 import cPickle as pickle
12except ImportError:
13 import pickle
14 logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
15
16def init(d):
17 siggens = [obj for obj in globals().itervalues()
18 if type(obj) is type and issubclass(obj, SignatureGenerator)]
19
20 desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
21 for sg in siggens:
22 if desired == sg.name:
23 return sg(d)
25 else:
26 logger.error("Invalid signature generator '%s', using default 'noop'\n"
27 "Available generators: %s", desired,
28 ', '.join(obj.name for obj in siggens))
29 return SignatureGenerator(d)
30
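# Example (editor's sketch): selecting the basic hash generator from the metadata,
# e.g. in a configuration file:
#
#     BB_SIGNATURE_HANDLER = "basichash"
#
# init(d) then returns a SignatureGeneratorBasicHash instance; an unrecognised name
# is reported and the "noop" SignatureGenerator is used instead.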
31class SignatureGenerator(object):
32    """Default "noop" signature generator: provides the interface but computes no real hashes."""
34 name = "noop"
35
36 def __init__(self, data):
37 self.taskhash = {}
38 self.runtaskdeps = {}
39 self.file_checksum_values = {}
40
41    def finalise(self, fn, d, variant):
42 return
43
44 def get_taskhash(self, fn, task, deps, dataCache):
45 return "0"
46
50 def stampfile(self, stampbase, file_name, taskname, extrainfo):
51 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
52
53 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
54 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
55
56 def dump_sigtask(self, fn, task, stampbase, runtime):
57 return
58
59 def invalidate_task(self, task, d, fn):
60 bb.build.del_stamp(task, d, fn)
61
62 def dump_sigs(self, dataCache, options):
63 return
64
65 def get_taskdata(self):
66 return (self.runtaskdeps, self.taskhash, self.file_checksum_values)
67
68 def set_taskdata(self, data):
69 self.runtaskdeps, self.taskhash, self.file_checksum_values = data
70
71
72class SignatureGeneratorBasic(SignatureGenerator):
73    """Hash each task together with the values of the variables it depends on."""
75 name = "basic"
76
77 def __init__(self, data):
78 self.basehash = {}
79 self.taskhash = {}
80 self.taskdeps = {}
81 self.runtaskdeps = {}
82 self.file_checksum_values = {}
83 self.gendeps = {}
84 self.lookupcache = {}
85 self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
86 self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
87 self.taskwhitelist = None
88 self.init_rundepcheck(data)
89
90 def init_rundepcheck(self, data):
91 self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
92 if self.taskwhitelist:
93 self.twl = re.compile(self.taskwhitelist)
94 else:
95 self.twl = None
96
97 def _build_data(self, fn, d):
98
99 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)
100
101 taskdeps = {}
102 basehash = {}
103
104 for task in tasklist:
105 data = lookupcache[task]
106
107 if data is None:
108 bb.error("Task %s from %s seems to be empty?!" % (task, fn))
109 data = ''
110
111 gendeps[task] -= self.basewhitelist
112 newdeps = gendeps[task]
113 seen = set()
114 while newdeps:
115 nextdeps = newdeps
116 seen |= nextdeps
117 newdeps = set()
118 for dep in nextdeps:
119 if dep in self.basewhitelist:
120 continue
121 gendeps[dep] -= self.basewhitelist
122 newdeps |= gendeps[dep]
123 newdeps -= seen
124
125 alldeps = sorted(seen)
126 for dep in alldeps:
127 data = data + dep
128 var = lookupcache[dep]
129 if var is not None:
130 data = data + str(var)
131 self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
132 taskdeps[task] = alldeps
133
134 self.taskdeps[fn] = taskdeps
135 self.gendeps[fn] = gendeps
136 self.lookupcache[fn] = lookupcache
137
138 return taskdeps
139
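    # Example (editor's sketch): for a task whose expanded dependency closure (after
    # removing whitelisted variables) is {"CC", "CFLAGS"}, the stored basehash is
    # roughly
    #
    #     hashlib.md5(task_body + "CC" + str(cc_value) + "CFLAGS" + str(cflags_value)).hexdigest()
    #
    # i.e. the task text concatenated with each dependency name and value in sorted
    # order, so a change to any dependent variable changes the hash.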
140 def finalise(self, fn, d, variant):
141
142 if variant:
143 fn = "virtual:" + variant + ":" + fn
144
145 try:
146 taskdeps = self._build_data(fn, d)
147 except:
148 bb.note("Error during finalise of %s" % fn)
149 raise
150
151 #Slow but can be useful for debugging mismatched basehashes
152 #for task in self.taskdeps[fn]:
153 # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
154
155 for task in taskdeps:
156 d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
157
158 def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
159 # Return True if we should keep the dependency, False to drop it
160 # We only manipulate the dependencies for packages not in the whitelist
161 if self.twl and not self.twl.search(recipename):
162 # then process the actual dependencies
163 if self.twl.search(depname):
164 return False
165 return True
166
167 def read_taint(self, fn, task, stampbase):
168 taint = None
169 try:
170 with open(stampbase + '.' + task + '.taint', 'r') as taintf:
171 taint = taintf.read()
172 except IOError:
173 pass
174 return taint
175
176 def get_taskhash(self, fn, task, deps, dataCache):
177 k = fn + "." + task
178 data = dataCache.basetaskhash[k]
179 self.runtaskdeps[k] = []
180 self.file_checksum_values[k] = {}
181 recipename = dataCache.pkg_fn[fn]
182 for dep in sorted(deps, key=clean_basepath):
183 depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
184 if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
185 continue
186 if dep not in self.taskhash:
187                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
188 data = data + self.taskhash[dep]
189 self.runtaskdeps[k].append(dep)
190
191 if task in dataCache.file_checksums[fn]:
192 checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
193 for (f,cs) in checksums:
194 self.file_checksum_values[k][f] = cs
195 if cs:
196 data = data + cs
197
198 taint = self.read_taint(fn, task, dataCache.stamp[fn])
199 if taint:
200 data = data + taint
201 logger.warn("%s is tainted from a forced run" % k)
202
203 h = hashlib.md5(data).hexdigest()
204 self.taskhash[k] = h
205 #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
206 return h
207
208 def dump_sigtask(self, fn, task, stampbase, runtime):
209 k = fn + "." + task
210 if runtime == "customfile":
211 sigfile = stampbase
212 elif runtime and k in self.taskhash:
213 sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
214 else:
215 sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
216
217 bb.utils.mkdirhier(os.path.dirname(sigfile))
218
219 data = {}
220 data['basewhitelist'] = self.basewhitelist
221 data['taskwhitelist'] = self.taskwhitelist
222 data['taskdeps'] = self.taskdeps[fn][task]
223 data['basehash'] = self.basehash[k]
224 data['gendeps'] = {}
225 data['varvals'] = {}
226 data['varvals'][task] = self.lookupcache[fn][task]
227 for dep in self.taskdeps[fn][task]:
228 if dep in self.basewhitelist:
229 continue
230 data['gendeps'][dep] = self.gendeps[fn][dep]
231 data['varvals'][dep] = self.lookupcache[fn][dep]
232
233 if runtime and k in self.taskhash:
234 data['runtaskdeps'] = self.runtaskdeps[k]
235 data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k].items()]
236 data['runtaskhashes'] = {}
237 for dep in data['runtaskdeps']:
238 data['runtaskhashes'][dep] = self.taskhash[dep]
239
240 taint = self.read_taint(fn, task, stampbase)
241 if taint:
242 data['taint'] = taint
243
244 fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
245 try:
246 with os.fdopen(fd, "wb") as stream:
247 p = pickle.dump(data, stream, -1)
248 stream.flush()
249 os.chmod(tmpfile, 0664)
250 os.rename(tmpfile, sigfile)
251 except (OSError, IOError) as err:
252 try:
253 os.unlink(tmpfile)
254 except OSError:
255 pass
256 raise err
257
258 def dump_sigs(self, dataCache, options):
259 for fn in self.taskdeps:
260 for task in self.taskdeps[fn]:
261 k = fn + "." + task
262 if k not in self.taskhash:
263 continue
264 if dataCache.basetaskhash[k] != self.basehash[k]:
265 bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
266 bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
267 self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
268
269class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
270 name = "basichash"
271
272 def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
273 if taskname != "do_setscene" and taskname.endswith("_setscene"):
274 k = fn + "." + taskname[:-9]
275 else:
276 k = fn + "." + taskname
277 if clean:
278 h = "*"
279 elif k in self.taskhash:
280 h = self.taskhash[k]
281 else:
282            # If k is not in basehash either, let the KeyError propagate - that is a real error
283 h = self.basehash[k]
284 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
285
286 def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
287 return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
288
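    # Example (editor's sketch): with an illustrative taskhash of "d41d8cd98f00" and
    # empty extrainfo, stampfile("STAMPS/foo-1.0-r0", fn, "do_compile", "") yields
    # "STAMPS/foo-1.0-r0.do_compile.d41d8cd98f00", while the clean mask substitutes
    # "*" for the hash so a glob matches stamps left by any previous hash.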
289 def invalidate_task(self, task, d, fn):
290 bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
291 bb.build.write_taint(task, d, fn)
292
293def dump_this_task(outfile, d):
294 import bb.parse
295 fn = d.getVar("BB_FILENAME", True)
296 task = "do_" + d.getVar("BB_CURRENTTASK", True)
297 bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
298
299def clean_basepath(a):
300 b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
301 if a.startswith("virtual:"):
302 b = b + ":" + a.rsplit(":", 1)[0]
303 return b
304
305def clean_basepaths(a):
306 b = {}
307 for x in a:
308 b[clean_basepath(x)] = a[x]
309 return b
310
311def compare_sigfiles(a, b, recursecb = None):
312 output = []
313
314 p1 = pickle.Unpickler(open(a, "rb"))
315 a_data = p1.load()
316 p2 = pickle.Unpickler(open(b, "rb"))
317 b_data = p2.load()
318
319 def dict_diff(a, b, whitelist=set()):
320 sa = set(a.keys())
321 sb = set(b.keys())
322 common = sa & sb
323 changed = set()
324 for i in common:
325 if a[i] != b[i] and i not in whitelist:
326 changed.add(i)
327 added = sb - sa
328 removed = sa - sb
329 return changed, added, removed
330
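    # Example (editor's sketch) of dict_diff semantics:
    #     dict_diff({'A': 1, 'B': 2}, {'B': 3, 'C': 4})  ->  ({'B'}, {'C'}, {'A'})
    # i.e. (changed, added, removed); keys on the whitelist are never reported as changed.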
331 def file_checksums_diff(a, b):
332 from collections import Counter
333 # Handle old siginfo format
334 if isinstance(a, dict):
335 a = [(os.path.basename(f), cs) for f, cs in a.items()]
336 if isinstance(b, dict):
337 b = [(os.path.basename(f), cs) for f, cs in b.items()]
338 # Compare lists, ensuring we can handle duplicate filenames if they exist
339 removedcount = Counter(a)
340 removedcount.subtract(b)
341 addedcount = Counter(b)
342 addedcount.subtract(a)
343 added = []
344 for x in b:
345 if addedcount[x] > 0:
346 addedcount[x] -= 1
347 added.append(x)
348 removed = []
349 changed = []
350 for x in a:
351 if removedcount[x] > 0:
352 removedcount[x] -= 1
353 for y in added:
354 if y[0] == x[0]:
355 changed.append((x[0], x[1], y[1]))
356 added.remove(y)
357 break
358 else:
359 removed.append(x)
360 added = [x[0] for x in added]
361 removed = [x[0] for x in removed]
362 return changed, added, removed
363
364 if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
365 output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
366 if a_data['basewhitelist'] and b_data['basewhitelist']:
367 output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
368
369 if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
370 output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
371 if a_data['taskwhitelist'] and b_data['taskwhitelist']:
372 output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
373
374 if a_data['taskdeps'] != b_data['taskdeps']:
375 output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
376
377 if a_data['basehash'] != b_data['basehash']:
378 output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
379
380 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
381 if changed:
382 for dep in changed:
383 output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
384 if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
385 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
386 if added:
387 for dep in added:
388 output.append("Dependency on variable %s was added" % (dep))
389 if removed:
390 for dep in removed:
391            output.append("Dependency on variable %s was removed" % (dep))
392
393
394 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
395 if changed:
396 for dep in changed:
397 output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
398
399 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
400 if changed:
401 for f, old, new in changed:
402 output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
403 if added:
404 for f in added:
405 output.append("Dependency on checksum of file %s was added" % (f))
406 if removed:
407 for f in removed:
408 output.append("Dependency on checksum of file %s was removed" % (f))
409
410
411 if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
412 a = a_data['runtaskhashes']
413 b = b_data['runtaskhashes']
414 changed, added, removed = dict_diff(a, b)
415 if added:
416 for dep in added:
417 bdep_found = False
418 if removed:
419 for bdep in removed:
420 if b[dep] == a[bdep]:
421 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
422 bdep_found = True
423 if not bdep_found:
424 output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
425 if removed:
426 for dep in removed:
427 adep_found = False
428 if added:
429 for adep in added:
430 if b[adep] == a[dep]:
431 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
432 adep_found = True
433 if not adep_found:
434 output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
435 if changed:
436 for dep in changed:
437 output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
438 if callable(recursecb):
439 # If a dependent hash changed, might as well print the line above and then defer to the changes in
440                    # that hash since in all likelihood, they're the same changes this task also saw.
441 recout = recursecb(dep, a[dep], b[dep])
442 if recout:
443 output = [output[-1]] + recout
444
445 a_taint = a_data.get('taint', None)
446 b_taint = b_data.get('taint', None)
447 if a_taint != b_taint:
448 output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))
449
450 return output
451
452
453def dump_sigfile(a):
454 output = []
455
456 p1 = pickle.Unpickler(open(a, "rb"))
457 a_data = p1.load()
458
459 output.append("basewhitelist: %s" % (a_data['basewhitelist']))
460
461 output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
462
463 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
464
465 output.append("basehash: %s" % (a_data['basehash']))
466
467 for dep in a_data['gendeps']:
468 output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
469
470 for dep in a_data['varvals']:
471 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
472
473 if 'runtaskdeps' in a_data:
474 output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
475
476 if 'file_checksum_values' in a_data:
477 output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
478
479 if 'runtaskhashes' in a_data:
480 for dep in a_data['runtaskhashes']:
481 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
482
483 if 'taint' in a_data:
484 output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
485
486 return output
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
new file mode 100644
index 0000000000..af72a1fb09
--- /dev/null
+++ b/bitbake/lib/bb/taskdata.py
@@ -0,0 +1,651 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'TaskData' implementation
6
7Task data collection and handling
8
9"""
10
11# Copyright (C) 2006 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import logging
27import re
28import bb
29
30logger = logging.getLogger("BitBake.TaskData")
31
32def re_match_strings(target, strings):
33 """
34    Return True if the string 'target' matches any of the given strings,
35    each of which may be a plain name or a regular expression
36 """
37 return any(name == target or re.match(name, target)
38 for name in strings)
39
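# Example (editor's sketch): used with ignored-dependency lists (e.g. ASSUME_PROVIDED),
# whose entries may be literal names or regular expressions:
#
#     re_match_strings("git-native", ["cmake-native", "^git-.*"])    # -> True
#     re_match_strings("quilt-native", ["cmake-native", "^git-.*"])  # -> False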
40class TaskData:
41 """
42 BitBake Task Data implementation
43 """
44 def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
45 self.build_names_index = []
46 self.run_names_index = []
47 self.fn_index = []
48
49 self.build_targets = {}
50 self.run_targets = {}
51
52 self.external_targets = []
53
54 self.tasks_fnid = []
55 self.tasks_name = []
56 self.tasks_tdepends = []
57 self.tasks_idepends = []
58 self.tasks_irdepends = []
59 # Cache to speed up task ID lookups
60 self.tasks_lookup = {}
61
62 self.depids = {}
63 self.rdepids = {}
64
65 self.consider_msgs_cache = []
66
67 self.failed_deps = []
68 self.failed_rdeps = []
69 self.failed_fnids = []
70
71 self.abort = abort
72 self.tryaltconfigs = tryaltconfigs
73
74 self.skiplist = skiplist
75
76 def getbuild_id(self, name):
77 """
78 Return an ID number for the build target name.
79 If it doesn't exist, create one.
80 """
81 if not name in self.build_names_index:
82 self.build_names_index.append(name)
83 return len(self.build_names_index) - 1
84
85 return self.build_names_index.index(name)
86
87 def getrun_id(self, name):
88 """
89 Return an ID number for the run target name.
90 If it doesn't exist, create one.
91 """
92 if not name in self.run_names_index:
93 self.run_names_index.append(name)
94 return len(self.run_names_index) - 1
95
96 return self.run_names_index.index(name)
97
98 def getfn_id(self, name):
99 """
100 Return an ID number for the filename.
101 If it doesn't exist, create one.
102 """
103 if not name in self.fn_index:
104 self.fn_index.append(name)
105 return len(self.fn_index) - 1
106
107 return self.fn_index.index(name)
108
109 def gettask_ids(self, fnid):
110 """
111 Return an array of the ID numbers matching a given fnid.
112 """
113 ids = []
114 if fnid in self.tasks_lookup:
115 for task in self.tasks_lookup[fnid]:
116 ids.append(self.tasks_lookup[fnid][task])
117 return ids
118
119 def gettask_id_fromfnid(self, fnid, task):
120 """
121 Return an ID number for the task matching fnid and task.
122 """
123 if fnid in self.tasks_lookup:
124 if task in self.tasks_lookup[fnid]:
125 return self.tasks_lookup[fnid][task]
126
127 return None
128
129 def gettask_id(self, fn, task, create = True):
130 """
131 Return an ID number for the task matching fn and task.
132 If it doesn't exist, create one by default.
133 Optionally return None instead.
134 """
135 fnid = self.getfn_id(fn)
136
137 if fnid in self.tasks_lookup:
138 if task in self.tasks_lookup[fnid]:
139 return self.tasks_lookup[fnid][task]
140
141 if not create:
142 return None
143
144 self.tasks_name.append(task)
145 self.tasks_fnid.append(fnid)
146 self.tasks_tdepends.append([])
147 self.tasks_idepends.append([])
148 self.tasks_irdepends.append([])
149
150 listid = len(self.tasks_name) - 1
151
152 if fnid not in self.tasks_lookup:
153 self.tasks_lookup[fnid] = {}
154 self.tasks_lookup[fnid][task] = listid
155
156 return listid
157
158 def add_tasks(self, fn, dataCache):
159 """
160 Add tasks for a given fn to the database
161 """
162
163 task_deps = dataCache.task_deps[fn]
164
165 fnid = self.getfn_id(fn)
166
167 if fnid in self.failed_fnids:
168 bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")
169
170 # Check if we've already seen this fn
171 if fnid in self.tasks_fnid:
172 return
173
174 for task in task_deps['tasks']:
175
176 # Work out task dependencies
177 parentids = []
178 for dep in task_deps['parents'][task]:
179 if dep not in task_deps['tasks']:
180 bb.debug(2, "Not adding dependency of %s on %s since %s does not exist" % (task, dep, dep))
181 continue
182 parentid = self.gettask_id(fn, dep)
183 parentids.append(parentid)
184 taskid = self.gettask_id(fn, task)
185 self.tasks_tdepends[taskid].extend(parentids)
186
187 # Touch all intertask dependencies
188 if 'depends' in task_deps and task in task_deps['depends']:
189 ids = []
190 for dep in task_deps['depends'][task].split():
191 if dep:
192 if ":" not in dep:
193 bb.msg.fatal("TaskData", "Error for %s: dependency %s does not contain a ':' character.\nTask 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
194 ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
195 self.tasks_idepends[taskid].extend(ids)
196 if 'rdepends' in task_deps and task in task_deps['rdepends']:
197 ids = []
198 for dep in task_deps['rdepends'][task].split():
199 if dep:
200 if ":" not in dep:
201 bb.msg.fatal("TaskData", "Error for %s: dependency %s does not contain a ':' character.\nTask 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
202 ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
203 self.tasks_irdepends[taskid].extend(ids)
204
205
206 # Work out build dependencies
207 if not fnid in self.depids:
208 dependids = {}
209 for depend in dataCache.deps[fn]:
210 dependids[self.getbuild_id(depend)] = None
211 self.depids[fnid] = dependids.keys()
212 logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
213
214 # Work out runtime dependencies
215 if not fnid in self.rdepids:
216 rdependids = {}
217 rdepends = dataCache.rundeps[fn]
218 rrecs = dataCache.runrecs[fn]
219 rdependlist = []
220 rreclist = []
221 for package in rdepends:
222 for rdepend in rdepends[package]:
223 rdependlist.append(rdepend)
224 rdependids[self.getrun_id(rdepend)] = None
225 for package in rrecs:
226 for rdepend in rrecs[package]:
227 rreclist.append(rdepend)
228 rdependids[self.getrun_id(rdepend)] = None
229 if rdependlist:
230 logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
231 if rreclist:
232 logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
233 self.rdepids[fnid] = rdependids.keys()
234
235 for dep in self.depids[fnid]:
236 if dep in self.failed_deps:
237 self.fail_fnid(fnid)
238 return
239 for dep in self.rdepids[fnid]:
240 if dep in self.failed_rdeps:
241 self.fail_fnid(fnid)
242 return
243
244 def have_build_target(self, target):
245 """
246 Have we a build target matching this name?
247 """
248 targetid = self.getbuild_id(target)
249
250 if targetid in self.build_targets:
251 return True
252 return False
253
254 def have_runtime_target(self, target):
255 """
256 Have we a runtime target matching this name?
257 """
258 targetid = self.getrun_id(target)
259
260 if targetid in self.run_targets:
261 return True
262 return False
263
264 def add_build_target(self, fn, item):
265 """
266 Add a build target.
267 If already present, append the provider fn to the list
268 """
269 targetid = self.getbuild_id(item)
270 fnid = self.getfn_id(fn)
271
272 if targetid in self.build_targets:
273 if fnid in self.build_targets[targetid]:
274 return
275 self.build_targets[targetid].append(fnid)
276 return
277 self.build_targets[targetid] = [fnid]
278
279 def add_runtime_target(self, fn, item):
280 """
281 Add a runtime target.
282 If already present, append the provider fn to the list
283 """
284 targetid = self.getrun_id(item)
285 fnid = self.getfn_id(fn)
286
287 if targetid in self.run_targets:
288 if fnid in self.run_targets[targetid]:
289 return
290 self.run_targets[targetid].append(fnid)
291 return
292 self.run_targets[targetid] = [fnid]
293
294 def mark_external_target(self, item):
295 """
296 Mark a build target as being externally requested
297 """
298 targetid = self.getbuild_id(item)
299
300 if targetid not in self.external_targets:
301 self.external_targets.append(targetid)
302
303 def get_unresolved_build_targets(self, dataCache):
304 """
305 Return a list of build targets whose providers
306 are unknown.
307 """
308 unresolved = []
309 for target in self.build_names_index:
310 if re_match_strings(target, dataCache.ignored_dependencies):
311 continue
312 if self.build_names_index.index(target) in self.failed_deps:
313 continue
314 if not self.have_build_target(target):
315 unresolved.append(target)
316 return unresolved
317
318 def get_unresolved_run_targets(self, dataCache):
319 """
320 Return a list of runtime targets whose providers
321 are unknown.
322 """
323 unresolved = []
324 for target in self.run_names_index:
325 if re_match_strings(target, dataCache.ignored_dependencies):
326 continue
327 if self.run_names_index.index(target) in self.failed_rdeps:
328 continue
329 if not self.have_runtime_target(target):
330 unresolved.append(target)
331 return unresolved
332
333 def get_provider(self, item):
334 """
335 Return a list of providers of item
336 """
337 targetid = self.getbuild_id(item)
338
339 return self.build_targets[targetid]
340
341 def get_dependees(self, itemid):
342 """
343 Return a list of targets which depend on item
344 """
345 dependees = []
346 for fnid in self.depids:
347 if itemid in self.depids[fnid]:
348 dependees.append(fnid)
349 return dependees
350
351 def get_dependees_str(self, item):
352 """
353 Return the targets which depend on item, as a list of user-readable filenames
354 """
355 itemid = self.getbuild_id(item)
356 dependees = []
357 for fnid in self.depids:
358 if itemid in self.depids[fnid]:
359 dependees.append(self.fn_index[fnid])
360 return dependees
361
362 def get_rdependees(self, itemid):
363 """
364 Return a list of targets which depend on runtime item
365 """
366 dependees = []
367 for fnid in self.rdepids:
368 if itemid in self.rdepids[fnid]:
369 dependees.append(fnid)
370 return dependees
371
372 def get_rdependees_str(self, item):
373 """
374 Return the targets which depend on the runtime item, as a list of user-readable filenames
375 """
376 itemid = self.getrun_id(item)
377 dependees = []
378 for fnid in self.rdepids:
379 if itemid in self.rdepids[fnid]:
380 dependees.append(self.fn_index[fnid])
381 return dependees
382
383 def get_reasons(self, item, runtime=False):
384 """
385 Get the reason(s) for an item not being provided, if any
386 """
387 reasons = []
388 if self.skiplist:
389 for fn in self.skiplist:
390 skipitem = self.skiplist[fn]
391 if skipitem.pn == item:
392 reasons.append("%s was skipped: %s" % (skipitem.pn, skipitem.skipreason))
393 elif runtime and item in skipitem.rprovides:
394 reasons.append("%s RPROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
395 elif not runtime and item in skipitem.provides:
396 reasons.append("%s PROVIDES %s but was skipped: %s" % (skipitem.pn, item, skipitem.skipreason))
397 return reasons
398
399 def get_close_matches(self, item, provider_list):
400 import difflib
401 if self.skiplist:
402 skipped = []
403 for fn in self.skiplist:
404 skipped.append(self.skiplist[fn].pn)
405 full_list = provider_list + skipped
406 else:
407 full_list = provider_list
408 return difflib.get_close_matches(item, full_list, cutoff=0.7)
409
410 def add_provider(self, cfgData, dataCache, item):
411 try:
412 self.add_provider_internal(cfgData, dataCache, item)
413 except bb.providers.NoProvider:
414 if self.abort:
415 raise
416 self.remove_buildtarget(self.getbuild_id(item))
417
418 self.mark_external_target(item)
419
420 def add_provider_internal(self, cfgData, dataCache, item):
421 """
422 Add the providers of item to the task data
423 Mark entries which were specifically added externally, as opposed to dependencies
424 added internally during dependency resolution
425 """
426
427 if re_match_strings(item, dataCache.ignored_dependencies):
428 return
429
430 if not item in dataCache.providers:
431 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=self.get_close_matches(item, dataCache.providers.keys())), cfgData)
432 raise bb.providers.NoProvider(item)
433
434 if self.have_build_target(item):
435 return
436
437 all_p = dataCache.providers[item]
438
439 eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
440 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
441
442 if not eligible:
443 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
444 raise bb.providers.NoProvider(item)
445
446 if len(eligible) > 1 and foundUnique == False:
447 if item not in self.consider_msgs_cache:
448 providers_list = []
449 for fn in eligible:
450 providers_list.append(dataCache.pkg_fn[fn])
451 bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
452 self.consider_msgs_cache.append(item)
453
454 for fn in eligible:
455 fnid = self.getfn_id(fn)
456 if fnid in self.failed_fnids:
457 continue
458 logger.debug(2, "adding %s to satisfy %s", fn, item)
459 self.add_build_target(fn, item)
460 self.add_tasks(fn, dataCache)
461
462
463 #item = dataCache.pkg_fn[fn]
464
465 def add_rprovider(self, cfgData, dataCache, item):
466 """
467 Add the runtime providers of item to the task data
468 (takes item names from RDEPENDS/PACKAGES namespace)
469 """
470
471 if re_match_strings(item, dataCache.ignored_dependencies):
472 return
473
474 if self.have_runtime_target(item):
475 return
476
477 all_p = bb.providers.getRuntimeProviders(dataCache, item)
478
479 if not all_p:
480 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
481 raise bb.providers.NoRProvider(item)
482
483 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
484 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
485
486 if not eligible:
487 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
488 raise bb.providers.NoRProvider(item)
489
490 if len(eligible) > 1 and numberPreferred == 0:
491 if item not in self.consider_msgs_cache:
492 providers_list = []
493 for fn in eligible:
494 providers_list.append(dataCache.pkg_fn[fn])
495 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
496 self.consider_msgs_cache.append(item)
497
498 if numberPreferred > 1:
499 if item not in self.consider_msgs_cache:
500 providers_list = []
501 for fn in eligible:
502 providers_list.append(dataCache.pkg_fn[fn])
503 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
504 self.consider_msgs_cache.append(item)
505 raise bb.providers.MultipleRProvider(item)
506
507 # run through the list, adding each eligible provider that has not already failed
508 for fn in eligible:
509 fnid = self.getfn_id(fn)
510 if fnid in self.failed_fnids:
511 continue
512 logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
513 self.add_runtime_target(fn, item)
514 self.add_tasks(fn, dataCache)
515
516 def fail_fnid(self, fnid, missing_list = []):
517 """
518 Mark a file as failed (unbuildable)
519 Remove any references from build and runtime provider lists
520
521 missing_list: a list of missing requirements for this target
522 """
523 if fnid in self.failed_fnids:
524 return
525 logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
526 self.failed_fnids.append(fnid)
527 for target in self.build_targets:
528 if fnid in self.build_targets[target]:
529 self.build_targets[target].remove(fnid)
530 if len(self.build_targets[target]) == 0:
531 self.remove_buildtarget(target, missing_list)
532 for target in self.run_targets:
533 if fnid in self.run_targets[target]:
534 self.run_targets[target].remove(fnid)
535 if len(self.run_targets[target]) == 0:
536 self.remove_runtarget(target, missing_list)
537
538 def remove_buildtarget(self, targetid, missing_list = []):
539 """
540 Mark a build target as failed (unbuildable)
541 Trigger removal of any files that have this as a dependency
542 """
543 if not missing_list:
544 missing_list = [self.build_names_index[targetid]]
545 else:
546 missing_list = [self.build_names_index[targetid]] + missing_list
547 logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
548 self.failed_deps.append(targetid)
549 dependees = self.get_dependees(targetid)
550 for fnid in dependees:
551 self.fail_fnid(fnid, missing_list)
552 for taskid in xrange(len(self.tasks_idepends)):
553 idepends = self.tasks_idepends[taskid]
554 for (idependid, idependtask) in idepends:
555 if idependid == targetid:
556 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
557
558 if self.abort and targetid in self.external_targets:
559 target = self.build_names_index[targetid]
560 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
561 raise bb.providers.NoProvider(target)
562
563 def remove_runtarget(self, targetid, missing_list = []):
564 """
565 Mark a run target as failed (unbuildable)
566 Trigger removal of any files that have this as a dependency
567 """
568 if not missing_list:
569 missing_list = [self.run_names_index[targetid]]
570 else:
571 missing_list = [self.run_names_index[targetid]] + missing_list
572
573 logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
574 self.failed_rdeps.append(targetid)
575 dependees = self.get_rdependees(targetid)
576 for fnid in dependees:
577 self.fail_fnid(fnid, missing_list)
578 for taskid in xrange(len(self.tasks_irdepends)):
579 irdepends = self.tasks_irdepends[taskid]
580 for (idependid, idependtask) in irdepends:
581 if idependid == targetid:
582 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
583
584 def add_unresolved(self, cfgData, dataCache):
585 """
586 Resolve all unresolved build and runtime targets
587 """
588 logger.info("Resolving any missing task queue dependencies")
589 while True:
590 added = 0
591 for target in self.get_unresolved_build_targets(dataCache):
592 try:
593 self.add_provider_internal(cfgData, dataCache, target)
594 added = added + 1
595 except bb.providers.NoProvider:
596 targetid = self.getbuild_id(target)
597 if self.abort and targetid in self.external_targets:
598 raise
599 self.remove_buildtarget(targetid)
600 for target in self.get_unresolved_run_targets(dataCache):
601 try:
602 self.add_rprovider(cfgData, dataCache, target)
603 added = added + 1
604 except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
605 self.remove_runtarget(self.getrun_id(target))
606 logger.debug(1, "Resolved " + str(added) + " extra dependencies")
607 if added == 0:
608 break
609 # self.dump_data()
610
611 def dump_data(self):
612 """
613 Dump some debug information on the internal data structures
614 """
615 logger.debug(3, "build_names:")
616 logger.debug(3, ", ".join(self.build_names_index))
617
618 logger.debug(3, "run_names:")
619 logger.debug(3, ", ".join(self.run_names_index))
620
621 logger.debug(3, "build_targets:")
622 for buildid in xrange(len(self.build_names_index)):
623 target = self.build_names_index[buildid]
624 targets = "None"
625 if buildid in self.build_targets:
626 targets = self.build_targets[buildid]
627 logger.debug(3, " (%s)%s: %s", buildid, target, targets)
628
629 logger.debug(3, "run_targets:")
630 for runid in xrange(len(self.run_names_index)):
631 target = self.run_names_index[runid]
632 targets = "None"
633 if runid in self.run_targets:
634 targets = self.run_targets[runid]
635 logger.debug(3, " (%s)%s: %s", runid, target, targets)
636
637 logger.debug(3, "tasks:")
638 for task in xrange(len(self.tasks_name)):
639 logger.debug(3, " (%s)%s - %s: %s",
640 task,
641 self.fn_index[self.tasks_fnid[task]],
642 self.tasks_name[task],
643 self.tasks_tdepends[task])
644
645 logger.debug(3, "dependency ids (per fn):")
646 for fnid in self.depids:
647 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
648
649 logger.debug(3, "runtime dependency ids (per fn):")
650 for fnid in self.rdepids:
651 logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
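A minimal sketch of how the TaskData class above is typically driven; the cfgData configuration object and dataCache recipe cache are assumed to come from the cooker and are not constructed here, and the target names are placeholders:

    taskdata = bb.taskdata.TaskData(abort=False)

    # Register the explicitly requested build targets; add_provider() also
    # marks each one as an external target.
    for target in ("zlib", "busybox"):
        taskdata.add_provider(cfgData, dataCache, target)

    # Iteratively pull in providers for any remaining unresolved dependencies.
    taskdata.add_unresolved(cfgData, dataCache)

    # Inspect which recipe files ended up providing a target.
    for fnid in taskdata.get_provider("zlib"):
        print(taskdata.fn_index[fnid])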
diff --git a/bitbake/lib/bb/tests/__init__.py b/bitbake/lib/bb/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/__init__.py
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
new file mode 100644
index 0000000000..4454bc51ed
--- /dev/null
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -0,0 +1,375 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Test for codeparser.py
5#
6# Copyright (C) 2010 Chris Larson
7# Copyright (C) 2012 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21#
22
23import unittest
24import logging
25import bb
26
27logger = logging.getLogger('BitBake.TestCodeParser')
28
29# bb.data references bb.parse but can't directly import due to circular dependencies.
30# Hack around it for now :(
31import bb.parse
32import bb.data
33
34class ReferenceTest(unittest.TestCase):
35 def setUp(self):
36 self.d = bb.data.init()
37
38 def setEmptyVars(self, varlist):
39 for k in varlist:
40 self.d.setVar(k, "")
41
42 def setValues(self, values):
43 for k, v in values.items():
44 self.d.setVar(k, v)
45
46 def assertReferences(self, refs):
47 self.assertEqual(self.references, refs)
48
49 def assertExecs(self, execs):
50 self.assertEqual(self.execs, execs)
51
52class VariableReferenceTest(ReferenceTest):
53
54 def parseExpression(self, exp):
55 parsedvar = self.d.expandWithRefs(exp, None)
56 self.references = parsedvar.references
57
58 def test_simple_reference(self):
59 self.setEmptyVars(["FOO"])
60 self.parseExpression("${FOO}")
61 self.assertReferences(set(["FOO"]))
62
63 def test_nested_reference(self):
64 self.setEmptyVars(["BAR"])
65 self.d.setVar("FOO", "BAR")
66 self.parseExpression("${${FOO}}")
67 self.assertReferences(set(["FOO", "BAR"]))
68
69 def test_python_reference(self):
70 self.setEmptyVars(["BAR"])
71 self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
72 self.assertReferences(set(["BAR"]))
73
74class ShellReferenceTest(ReferenceTest):
75
76 def parseExpression(self, exp):
77 parsedvar = self.d.expandWithRefs(exp, None)
78 parser = bb.codeparser.ShellParser("ParserTest", logger)
79 parser.parse_shell(parsedvar.value)
80
81 self.references = parsedvar.references
82 self.execs = parser.execs
83
84 def test_quotes_inside_assign(self):
85 self.parseExpression('foo=foo"bar"baz')
86 self.assertReferences(set([]))
87
88 def test_quotes_inside_arg(self):
89 self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
90 self.assertExecs(set(["sed"]))
91
92 def test_arg_continuation(self):
93 self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
94 self.assertExecs(set(["sed"]))
95
96 def test_dollar_in_quoted(self):
97 self.parseExpression('sed -i -e "foo$" *.pc')
98 self.assertExecs(set(["sed"]))
99
100 def test_quotes_inside_arg_continuation(self):
101 self.setEmptyVars(["bindir", "D", "libdir"])
102 self.parseExpression("""
103sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
104-e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
105${D}${libdir}/pkgconfig/*.pc
106""")
107 self.assertReferences(set(["bindir", "D", "libdir"]))
108
109 def test_assign_subshell_expansion(self):
110 self.parseExpression("foo=$(echo bar)")
111 self.assertExecs(set(["echo"]))
112
113 def test_shell_unexpanded(self):
114 self.setEmptyVars(["QT_BASE_NAME"])
115 self.parseExpression('echo "${QT_BASE_NAME}"')
116 self.assertExecs(set(["echo"]))
117 self.assertReferences(set(["QT_BASE_NAME"]))
118
119 def test_incomplete_varexp_single_quotes(self):
120 self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
121 self.assertExecs(set(["sed"]))
122
123
124 def test_until(self):
125 self.parseExpression("until false; do echo true; done")
126 self.assertExecs(set(["false", "echo"]))
127 self.assertReferences(set())
128
129 def test_case(self):
130 self.parseExpression("""
131case $foo in
132*)
133bar
134;;
135esac
136""")
137 self.assertExecs(set(["bar"]))
138 self.assertReferences(set())
139
140 def test_assign_exec(self):
141 self.parseExpression("a=b c='foo bar' alpha 1 2 3")
142 self.assertExecs(set(["alpha"]))
143
144 def test_redirect_to_file(self):
145 self.setEmptyVars(["foo"])
146 self.parseExpression("echo foo >${foo}/bar")
147 self.assertExecs(set(["echo"]))
148 self.assertReferences(set(["foo"]))
149
150 def test_heredoc(self):
151 self.setEmptyVars(["theta"])
152 self.parseExpression("""
153cat <<END
154alpha
155beta
156${theta}
157END
158""")
159 self.assertReferences(set(["theta"]))
160
161 def test_redirect_from_heredoc(self):
162 v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
163 self.setEmptyVars(v)
164 self.parseExpression("""
165cat <<END >${B}/cachedpaths
166shadow_cv_maildir=${SHADOW_MAILDIR}
167shadow_cv_mailfile=${SHADOW_MAILFILE}
168shadow_cv_utmpdir=${SHADOW_UTMPDIR}
169shadow_cv_logdir=${SHADOW_LOGDIR}
170shadow_cv_passwd_dir=${bindir}
171END
172""")
173 self.assertReferences(set(v))
174 self.assertExecs(set(["cat"]))
175
176# def test_incomplete_command_expansion(self):
177# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
178# bbvalue.shparse("cp foo`", self.d), self.d)
179
180# def test_rogue_dollarsign(self):
181# self.setValues({"D" : "/tmp"})
182# self.parseExpression("install -d ${D}$")
183# self.assertReferences(set(["D"]))
184# self.assertExecs(set(["install"]))
185
186
187class PythonReferenceTest(ReferenceTest):
188
189 def setUp(self):
190 self.d = bb.data.init()
191 if hasattr(bb.utils, "_context"):
192 self.context = bb.utils._context
193 else:
194 import __builtin__
195 self.context = __builtin__.__dict__
196
197 def parseExpression(self, exp):
198 parsedvar = self.d.expandWithRefs(exp, None)
199 parser = bb.codeparser.PythonParser("ParserTest", logger)
200 parser.parse_python(parsedvar.value)
201
202 self.references = parsedvar.references | parser.references
203 self.execs = parser.execs
204
205 @staticmethod
206 def indent(value):
207 """Python Snippets have to be indented, python values don't have to
208be. These unit tests are testing snippets."""
209 return " " + value
210
211 def test_getvar_reference(self):
212 self.parseExpression("bb.data.getVar('foo', d, True)")
213 self.assertReferences(set(["foo"]))
214 self.assertExecs(set())
215
216 def test_getvar_computed_reference(self):
217 self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
218 self.assertReferences(set())
219 self.assertExecs(set())
220
221 def test_getvar_exec_reference(self):
222 self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
223 self.assertReferences(set())
224 self.assertExecs(set(["eval"]))
225
226 def test_var_reference(self):
227 self.context["foo"] = lambda x: x
228 self.setEmptyVars(["FOO"])
229 self.parseExpression("foo('${FOO}')")
230 self.assertReferences(set(["FOO"]))
231 self.assertExecs(set(["foo"]))
232 del self.context["foo"]
233
234 def test_var_exec(self):
235 for etype in ("func", "task"):
236 self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
237 self.d.setVarFlag("do_something", etype, True)
238 self.parseExpression("bb.build.exec_func('do_something', d)")
239 self.assertReferences(set([]))
240 self.assertExecs(set(["do_something"]))
241
242 def test_function_reference(self):
243 self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
244 self.d.setVar("FOO", "Hello, World!")
245 self.parseExpression("testfunc('${FOO}')")
246 self.assertReferences(set(["FOO"]))
247 self.assertExecs(set(["testfunc"]))
248 del self.context["testfunc"]
249
250 def test_qualified_function_reference(self):
251 self.parseExpression("time.time()")
252 self.assertExecs(set(["time.time"]))
253
254 def test_qualified_function_reference_2(self):
255 self.parseExpression("os.path.dirname('/foo/bar')")
256 self.assertExecs(set(["os.path.dirname"]))
257
258 def test_qualified_function_reference_nested(self):
259 self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
260 self.assertExecs(set(["time.strftime", "time.gmtime"]))
261
262 def test_function_reference_chained(self):
263 self.context["testget"] = lambda: "\tstrip me "
264 self.parseExpression("testget().strip()")
265 self.assertExecs(set(["testget"]))
266 del self.context["testget"]
267
268
269class DependencyReferenceTest(ReferenceTest):
270
271 pydata = """
272bb.data.getVar('somevar', d, True)
273def test(d):
274 foo = 'bar %s' % 'foo'
275def test2(d):
276 d.getVar(foo, True)
277 d.getVar('bar', False)
278 test2(d)
279
280def a():
281 \"\"\"some
282 stuff
283 \"\"\"
284 return "heh"
285
286test(d)
287
288bb.data.expand(bb.data.getVar("something", False, d), d)
289bb.data.expand("${inexpand} somethingelse", d)
290bb.data.getVar(a(), d, False)
291"""
292
293 def test_python(self):
294 self.d.setVar("FOO", self.pydata)
295 self.setEmptyVars(["inexpand", "a", "test2", "test"])
296 self.d.setVarFlags("FOO", {"func": True, "python": True})
297
298 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
299
300 self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
301
302
303 shelldata = """
304foo () {
305bar
306}
307{
308echo baz
309$(heh)
310eval `moo`
311}
312a=b
313c=d
314(
315true && false
316test -f foo
317testval=something
318$testval
319) || aiee
320! inverted
321echo ${somevar}
322
323case foo in
324bar)
325echo bar
326;;
327baz)
328echo baz
329;;
330foo*)
331echo foo
332;;
333esac
334"""
335
336 def test_shell(self):
337 execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
338 self.d.setVar("somevar", "heh")
339 self.d.setVar("inverted", "echo inverted...")
340 self.d.setVarFlag("inverted", "func", True)
341 self.d.setVar("FOO", self.shelldata)
342 self.d.setVarFlags("FOO", {"func": True})
343 self.setEmptyVars(execs)
344
345 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
346
347 self.assertEquals(deps, set(["somevar", "inverted"] + execs))
348
349
350 def test_vardeps(self):
351 self.d.setVar("oe_libinstall", "echo test")
352 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
353 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
354
355 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
356
357 self.assertEquals(deps, set(["oe_libinstall"]))
358
359 def test_vardeps_expand(self):
360 self.d.setVar("oe_libinstall", "echo test")
361 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
362 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
363
364 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
365
366 self.assertEquals(deps, set(["oe_libinstall"]))
367
368 #Currently no wildcard support
369 #def test_vardeps_wildcards(self):
370 # self.d.setVar("oe_libinstall", "echo test")
371 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
372 # self.d.setVarFlag("FOO", "vardeps", "oe_*")
373 # self.assertEquals(deps, set(["oe_libinstall"]))
374
375
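The parsers exercised above can also be used standalone. A minimal sketch, using the same classes and methods the tests call; the result sets shown in the comments are indicative only:

    import logging
    import bb.codeparser

    logger = logging.getLogger("BitBake.CodeParserExample")

    # Which commands does a shell fragment execute?
    shparser = bb.codeparser.ShellParser("example", logger)
    shparser.parse_shell('sed -i -e "s,foo,bar,g" *.pc && echo done')
    print(shparser.execs)        # expected: set(["sed", "echo"])

    # Which datastore variables does a Python fragment reference?
    pyparser = bb.codeparser.PythonParser("example", logger)
    pyparser.parse_python("bb.data.getVar('foo', d, True)")
    print(pyparser.references)   # expected: set(["foo"])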
diff --git a/bitbake/lib/bb/tests/cow.py b/bitbake/lib/bb/tests/cow.py
new file mode 100644
index 0000000000..35c5841f32
--- /dev/null
+++ b/bitbake/lib/bb/tests/cow.py
@@ -0,0 +1,136 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for Copy-on-Write (cow.py)
5#
6# Copyright 2006 Holger Freyther <freyther@handhelds.org>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import os
24
25class COWTestCase(unittest.TestCase):
26 """
27 Test case for the COW module from mithro
28 """
29
30 def testGetSet(self):
31 """
32 Test basic get and set operations
33 """
34 from bb.COW import COWDictBase
35 a = COWDictBase.copy()
36
37 self.assertEquals(False, a.has_key('a'))
38
39 a['a'] = 'a'
40 a['b'] = 'b'
41 self.assertEquals(True, a.has_key('a'))
42 self.assertEquals(True, a.has_key('b'))
43 self.assertEquals('a', a['a'] )
44 self.assertEquals('b', a['b'] )
45
46 def testCopyCopy(self):
47 """
48 Test the copy of copies
49 """
50
51 from bb.COW import COWDictBase
52
53 # create two COW dict 'instances'
54 b = COWDictBase.copy()
55 c = COWDictBase.copy()
56
57 # assign some keys to one instance, some keys to another
58 b['a'] = 10
59 b['c'] = 20
60 c['a'] = 30
61
62 # test separation of the two instances
63 self.assertEquals(False, c.has_key('c'))
64 self.assertEquals(30, c['a'])
65 self.assertEquals(10, b['a'])
66
67 # test copy
68 b_2 = b.copy()
69 c_2 = c.copy()
70
71 self.assertEquals(False, c_2.has_key('c'))
72 self.assertEquals(10, b_2['a'])
73
74 b_2['d'] = 40
75 self.assertEquals(False, c_2.has_key('d'))
76 self.assertEquals(True, b_2.has_key('d'))
77 self.assertEquals(40, b_2['d'])
78 self.assertEquals(False, b.has_key('d'))
79 self.assertEquals(False, c.has_key('d'))
80
81 c_2['d'] = 30
82 self.assertEquals(True, c_2.has_key('d'))
83 self.assertEquals(True, b_2.has_key('d'))
84 self.assertEquals(30, c_2['d'])
85 self.assertEquals(40, b_2['d'])
86 self.assertEquals(False, b.has_key('d'))
87 self.assertEquals(False, c.has_key('d'))
88
89 # test copy of the copy
90 c_3 = c_2.copy()
91 b_3 = b_2.copy()
92 b_3_2 = b_2.copy()
93
94 c_3['e'] = 4711
95 self.assertEquals(4711, c_3['e'])
96 self.assertEquals(False, c_2.has_key('e'))
97 self.assertEquals(False, b_3.has_key('e'))
98 self.assertEquals(False, b_3_2.has_key('e'))
99 self.assertEquals(False, b_2.has_key('e'))
100
101 b_3['e'] = 'viel'
102 self.assertEquals('viel', b_3['e'])
103 self.assertEquals(4711, c_3['e'])
104 self.assertEquals(False, c_2.has_key('e'))
105 self.assertEquals(True, b_3.has_key('e'))
106 self.assertEquals(False, b_3_2.has_key('e'))
107 self.assertEquals(False, b_2.has_key('e'))
108
109 def testCow(self):
110 from bb.COW import COWDictBase
111 c = COWDictBase.copy()
112 c['123'] = 1027
113 c['other'] = 4711
114 c['d'] = { 'abc' : 10, 'bcd' : 20 }
115
116 copy = c.copy()
117
118 self.assertEquals(1027, c['123'])
119 self.assertEquals(4711, c['other'])
120 self.assertEquals({'abc':10, 'bcd':20}, c['d'])
121 self.assertEquals(1027, copy['123'])
122 self.assertEquals(4711, copy['other'])
123 self.assertEquals({'abc':10, 'bcd':20}, copy['d'])
124
125 # cow it now
126 copy['123'] = 1028
127 copy['other'] = 4712
128 copy['d']['abc'] = 20
129
130
131 self.assertEquals(1027, c['123'])
132 self.assertEquals(4711, c['other'])
133 self.assertEquals({'abc':10, 'bcd':20}, c['d'])
134 self.assertEquals(1028, copy['123'])
135 self.assertEquals(4712, copy['other'])
136 self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
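A condensed view of the copy-on-write behaviour these tests exercise, as a sketch:

    from bb.COW import COWDictBase

    base = COWDictBase.copy()
    base['PN'] = 'zlib'

    child = base.copy()        # cheap: nothing is duplicated up front
    child['PN'] = 'busybox'    # the write lands only in the child layer

    print(base['PN'])          # 'zlib' - the parent is unaffected
    print(child['PN'])         # 'busybox'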
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
new file mode 100644
index 0000000000..23a5e5fd9c
--- /dev/null
+++ b/bitbake/lib/bb/tests/data.py
@@ -0,0 +1,351 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for the Data Store (data.py/data_smart.py)
5#
6# Copyright (C) 2010 Chris Larson
7# Copyright (C) 2012 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21#
22
23import unittest
24import bb
25import bb.data
26import bb.parse
27
28class DataExpansions(unittest.TestCase):
29 def setUp(self):
30 self.d = bb.data.init()
31 self.d["foo"] = "value_of_foo"
32 self.d["bar"] = "value_of_bar"
33 self.d["value_of_foo"] = "value_of_'value_of_foo'"
34
35 def test_one_var(self):
36 val = self.d.expand("${foo}")
37 self.assertEqual(str(val), "value_of_foo")
38
39 def test_indirect_one_var(self):
40 val = self.d.expand("${${foo}}")
41 self.assertEqual(str(val), "value_of_'value_of_foo'")
42
43 def test_indirect_and_another(self):
44 val = self.d.expand("${${foo}} ${bar}")
45 self.assertEqual(str(val), "value_of_'value_of_foo' value_of_bar")
46
47 def test_python_snippet(self):
48 val = self.d.expand("${@5*12}")
49 self.assertEqual(str(val), "60")
50
51 def test_expand_in_python_snippet(self):
52 val = self.d.expand("${@'boo ' + '${foo}'}")
53 self.assertEqual(str(val), "boo value_of_foo")
54
55 def test_python_snippet_getvar(self):
56 val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
57 self.assertEqual(str(val), "value_of_foo value_of_bar")
58
59 def test_python_snippet_syntax_error(self):
60 self.d.setVar("FOO", "${@foo = 5}")
61 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
62
63 def test_python_snippet_runtime_error(self):
64 self.d.setVar("FOO", "${@int('test')}")
65 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
66
67 def test_python_snippet_error_path(self):
68 self.d.setVar("FOO", "foo value ${BAR}")
69 self.d.setVar("BAR", "bar value ${@int('test')}")
70 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
71
72 def test_value_containing_value(self):
73 val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
74 self.assertEqual(str(val), "value_of_foo value_of_bar")
75
76 def test_reference_undefined_var(self):
77 val = self.d.expand("${undefinedvar} meh")
78 self.assertEqual(str(val), "${undefinedvar} meh")
79
80 def test_double_reference(self):
81 self.d.setVar("BAR", "bar value")
82 self.d.setVar("FOO", "${BAR} foo ${BAR}")
83 val = self.d.getVar("FOO", True)
84 self.assertEqual(str(val), "bar value foo bar value")
85
86 def test_direct_recursion(self):
87 self.d.setVar("FOO", "${FOO}")
88 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
89
90 def test_indirect_recursion(self):
91 self.d.setVar("FOO", "${BAR}")
92 self.d.setVar("BAR", "${BAZ}")
93 self.d.setVar("BAZ", "${FOO}")
94 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
95
96 def test_recursion_exception(self):
97 self.d.setVar("FOO", "${BAR}")
98 self.d.setVar("BAR", "${${@'FOO'}}")
99 self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
100
101 def test_incomplete_varexp_single_quotes(self):
102 self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
103 val = self.d.getVar("FOO", True)
104 self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
105
106 def test_nonstring(self):
107 self.d.setVar("TEST", 5)
108 val = self.d.getVar("TEST", True)
109 self.assertEqual(str(val), "5")
110
111 def test_rename(self):
112 self.d.renameVar("foo", "newfoo")
113 self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
114 self.assertEqual(self.d.getVar("foo"), None)
115
116 def test_deletion(self):
117 self.d.delVar("foo")
118 self.assertEqual(self.d.getVar("foo"), None)
119
120 def test_keys(self):
121 keys = self.d.keys()
122 self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
123
124class TestNestedExpansions(unittest.TestCase):
125 def setUp(self):
126 self.d = bb.data.init()
127 self.d["foo"] = "foo"
128 self.d["bar"] = "bar"
129 self.d["value_of_foobar"] = "187"
130
131 def test_refs(self):
132 val = self.d.expand("${value_of_${foo}${bar}}")
133 self.assertEqual(str(val), "187")
134
135 #def test_python_refs(self):
136 # val = self.d.expand("${@${@3}**2 + ${@4}**2}")
137 # self.assertEqual(str(val), "25")
138
139 def test_ref_in_python_ref(self):
140 val = self.d.expand("${@'${foo}' + 'bar'}")
141 self.assertEqual(str(val), "foobar")
142
143 def test_python_ref_in_ref(self):
144 val = self.d.expand("${${@'f'+'o'+'o'}}")
145 self.assertEqual(str(val), "foo")
146
147 def test_deep_nesting(self):
148 depth = 100
149 val = self.d.expand("${" * depth + "foo" + "}" * depth)
150 self.assertEqual(str(val), "foo")
151
152 #def test_deep_python_nesting(self):
153 # depth = 50
154 # val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
155 # self.assertEqual(str(val), str(depth + 1))
156
157 def test_mixed(self):
158 val = self.d.expand("${value_of_${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
159 self.assertEqual(str(val), "187")
160
161 def test_runtime(self):
162 val = self.d.expand("${${@'value_of' + '_f'+'o'+'o'+'b'+'a'+'r'}}")
163 self.assertEqual(str(val), "187")
164
165class TestMemoize(unittest.TestCase):
166 def test_memoized(self):
167 d = bb.data.init()
168 d.setVar("FOO", "bar")
169 self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))
170
171 def test_not_memoized(self):
172 d1 = bb.data.init()
173 d2 = bb.data.init()
174 d1.setVar("FOO", "bar")
175 d2.setVar("FOO", "bar2")
176 self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))
177
178 def test_changed_after_memoized(self):
179 d = bb.data.init()
180 d.setVar("foo", "value of foo")
181 self.assertEqual(str(d.getVar("foo")), "value of foo")
182 d.setVar("foo", "second value of foo")
183 self.assertEqual(str(d.getVar("foo")), "second value of foo")
184
185 def test_same_value(self):
186 d = bb.data.init()
187 d.setVar("foo", "value of")
188 d.setVar("bar", "value of")
189 self.assertEqual(d.getVar("foo"),
190 d.getVar("bar"))
191
192class TestConcat(unittest.TestCase):
193 def setUp(self):
194 self.d = bb.data.init()
195 self.d.setVar("FOO", "foo")
196 self.d.setVar("VAL", "val")
197 self.d.setVar("BAR", "bar")
198
199 def test_prepend(self):
200 self.d.setVar("TEST", "${VAL}")
201 self.d.prependVar("TEST", "${FOO}:")
202 self.assertEqual(self.d.getVar("TEST", True), "foo:val")
203
204 def test_append(self):
205 self.d.setVar("TEST", "${VAL}")
206 self.d.appendVar("TEST", ":${BAR}")
207 self.assertEqual(self.d.getVar("TEST", True), "val:bar")
208
209 def test_multiple_append(self):
210 self.d.setVar("TEST", "${VAL}")
211 self.d.prependVar("TEST", "${FOO}:")
212 self.d.appendVar("TEST", ":val2")
213 self.d.appendVar("TEST", ":${BAR}")
214 self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
215
216class TestConcatOverride(unittest.TestCase):
217 def setUp(self):
218 self.d = bb.data.init()
219 self.d.setVar("FOO", "foo")
220 self.d.setVar("VAL", "val")
221 self.d.setVar("BAR", "bar")
222
223 def test_prepend(self):
224 self.d.setVar("TEST", "${VAL}")
225 self.d.setVar("TEST_prepend", "${FOO}:")
226 bb.data.update_data(self.d)
227 self.assertEqual(self.d.getVar("TEST", True), "foo:val")
228
229 def test_append(self):
230 self.d.setVar("TEST", "${VAL}")
231 self.d.setVar("TEST_append", ":${BAR}")
232 bb.data.update_data(self.d)
233 self.assertEqual(self.d.getVar("TEST", True), "val:bar")
234
235 def test_multiple_append(self):
236 self.d.setVar("TEST", "${VAL}")
237 self.d.setVar("TEST_prepend", "${FOO}:")
238 self.d.setVar("TEST_append", ":val2")
239 self.d.setVar("TEST_append", ":${BAR}")
240 bb.data.update_data(self.d)
241 self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
242
243 def test_remove(self):
244 self.d.setVar("TEST", "${VAL} ${BAR}")
245 self.d.setVar("TEST_remove", "val")
246 bb.data.update_data(self.d)
247 self.assertEqual(self.d.getVar("TEST", True), "bar")
248
249 def test_doubleref_remove(self):
250 self.d.setVar("TEST", "${VAL} ${BAR}")
251 self.d.setVar("TEST_remove", "val")
252 self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
253 bb.data.update_data(self.d)
254 self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar")
255
256 def test_empty_remove(self):
257 self.d.setVar("TEST", "")
258 self.d.setVar("TEST_remove", "val")
259 bb.data.update_data(self.d)
260 self.assertEqual(self.d.getVar("TEST", True), "")
261
262 def test_remove_expansion(self):
263 self.d.setVar("BAR", "Z")
264 self.d.setVar("TEST", "${BAR}/X Y")
265 self.d.setVar("TEST_remove", "${BAR}/X")
266 bb.data.update_data(self.d)
267 self.assertEqual(self.d.getVar("TEST", True), "Y")
268
269 def test_remove_expansion_items(self):
270 self.d.setVar("TEST", "A B C D")
271 self.d.setVar("BAR", "B D")
272 self.d.setVar("TEST_remove", "${BAR}")
273 bb.data.update_data(self.d)
274 self.assertEqual(self.d.getVar("TEST", True), "A C")
275
276class TestOverrides(unittest.TestCase):
277 def setUp(self):
278 self.d = bb.data.init()
279 self.d.setVar("OVERRIDES", "foo:bar:local")
280 self.d.setVar("TEST", "testvalue")
281
282 def test_no_override(self):
283 bb.data.update_data(self.d)
284 self.assertEqual(self.d.getVar("TEST", True), "testvalue")
285
286 def test_one_override(self):
287 self.d.setVar("TEST_bar", "testvalue2")
288 bb.data.update_data(self.d)
289 self.assertEqual(self.d.getVar("TEST", True), "testvalue2")
290
291 def test_multiple_override(self):
292 self.d.setVar("TEST_bar", "testvalue2")
293 self.d.setVar("TEST_local", "testvalue3")
294 self.d.setVar("TEST_foo", "testvalue4")
295 bb.data.update_data(self.d)
296 self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
297
298
299class TestFlags(unittest.TestCase):
300 def setUp(self):
301 self.d = bb.data.init()
302 self.d.setVar("foo", "value of foo")
303 self.d.setVarFlag("foo", "flag1", "value of flag1")
304 self.d.setVarFlag("foo", "flag2", "value of flag2")
305
306 def test_setflag(self):
307 self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
308 self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")
309
310 def test_delflag(self):
311 self.d.delVarFlag("foo", "flag2")
312 self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
313 self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)
314
315
316class Contains(unittest.TestCase):
317 def setUp(self):
318 self.d = bb.data.init()
319 self.d.setVar("SOMEFLAG", "a b c")
320
321 def test_contains(self):
322 self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
323 self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
324 self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))
325
326 self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
327 self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
328 self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))
329
330 self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
331 self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))
332
333 self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
334 self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
335 self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
336 self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))
337
338 def test_contains_any(self):
339 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
340 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
341 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))
342
343 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
344 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
345 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))
346
347 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
348 self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))
349
350 self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
351 self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))
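The append/override handling covered above can be reproduced in a few lines outside the test harness; this sketch mirrors the assertions in TestConcatOverride:

    import bb.data

    d = bb.data.init()
    d.setVar("VAL", "val")
    d.setVar("TEST", "${VAL}")
    d.setVar("TEST_append", ":extra")

    bb.data.update_data(d)            # apply _append/_prepend/_remove handling
    print(d.getVar("TEST", True))     # "val:extra"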
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
new file mode 100644
index 0000000000..d95b43a5e3
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -0,0 +1,569 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for the Fetcher (fetch2/)
5#
6# Copyright (C) 2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import tempfile
24import subprocess
25import os
26from bb.fetch2 import URI
27import bb
28
29class URITest(unittest.TestCase):
30 test_uris = {
31 "http://www.google.com/index.html" : {
32 'uri': 'http://www.google.com/index.html',
33 'scheme': 'http',
34 'hostname': 'www.google.com',
35 'port': None,
36 'hostport': 'www.google.com',
37 'path': '/index.html',
38 'userinfo': '',
39 'username': '',
40 'password': '',
41 'params': {},
42 'query': {},
43 'relative': False
44 },
45 "http://www.google.com/index.html;param1=value1" : {
46 'uri': 'http://www.google.com/index.html;param1=value1',
47 'scheme': 'http',
48 'hostname': 'www.google.com',
49 'port': None,
50 'hostport': 'www.google.com',
51 'path': '/index.html',
52 'userinfo': '',
53 'username': '',
54 'password': '',
55 'params': {
56 'param1': 'value1'
57 },
58 'query': {},
59 'relative': False
60 },
61 "http://www.example.org/index.html?param1=value1" : {
62 'uri': 'http://www.example.org/index.html?param1=value1',
63 'scheme': 'http',
64 'hostname': 'www.example.org',
65 'port': None,
66 'hostport': 'www.example.org',
67 'path': '/index.html',
68 'userinfo': '',
69 'username': '',
70 'password': '',
71 'params': {},
72 'query': {
73 'param1': 'value1'
74 },
75 'relative': False
76 },
77 "http://www.example.org/index.html?qparam1=qvalue1;param2=value2" : {
78 'uri': 'http://www.example.org/index.html?qparam1=qvalue1;param2=value2',
79 'scheme': 'http',
80 'hostname': 'www.example.org',
81 'port': None,
82 'hostport': 'www.example.org',
83 'path': '/index.html',
84 'userinfo': '',
85 'username': '',
86 'password': '',
87 'params': {
88 'param2': 'value2'
89 },
90 'query': {
91 'qparam1': 'qvalue1'
92 },
93 'relative': False
94 },
95 "http://www.example.com:8080/index.html" : {
96 'uri': 'http://www.example.com:8080/index.html',
97 'scheme': 'http',
98 'hostname': 'www.example.com',
99 'port': 8080,
100 'hostport': 'www.example.com:8080',
101 'path': '/index.html',
102 'userinfo': '',
103 'username': '',
104 'password': '',
105 'params': {},
106 'query': {},
107 'relative': False
108 },
109 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : {
110 'uri': 'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg',
111 'scheme': 'cvs',
112 'hostname': 'cvs.handhelds.org',
113 'port': None,
114 'hostport': 'cvs.handhelds.org',
115 'path': '/cvs',
116 'userinfo': 'anoncvs',
117 'username': 'anoncvs',
118 'password': '',
119 'params': {
120 'module': 'familiar/dist/ipkg'
121 },
122 'query': {},
123 'relative': False
124 },
125 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg": {
126 'uri': 'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg',
127 'scheme': 'cvs',
128 'hostname': 'cvs.handhelds.org',
129 'port': None,
130 'hostport': 'cvs.handhelds.org',
131 'path': '/cvs',
132 'userinfo': 'anoncvs:anonymous',
133 'username': 'anoncvs',
134 'password': 'anonymous',
135 'params': {
136 'tag': 'V0-99-81',
137 'module': 'familiar/dist/ipkg'
138 },
139 'query': {},
140 'relative': False
141 },
142 "file://example.diff": { # NOTE: Not RFC compliant!
143 'uri': 'file:example.diff',
144 'scheme': 'file',
145 'hostname': '',
146 'port': None,
147 'hostport': '',
148 'path': 'example.diff',
149 'userinfo': '',
150 'username': '',
151 'password': '',
152 'params': {},
153 'query': {},
154 'relative': True
155 },
156 "file:example.diff": { # NOTE: RFC compliant version of the former
157 'uri': 'file:example.diff',
158 'scheme': 'file',
159 'hostname': '',
160 'port': None,
161 'hostport': '',
162 'path': 'example.diff',
163 'userinfo': '',
164 'userinfo': '',
165 'username': '',
166 'password': '',
167 'params': {},
168 'query': {},
169 'relative': True
170 },
171 "file:///tmp/example.diff": {
172 'uri': 'file:///tmp/example.diff',
173 'scheme': 'file',
174 'hostname': '',
175 'port': None,
176 'hostport': '',
177 'path': '/tmp/example.diff',
178 'userinfo': '',
179 'userinfo': '',
180 'username': '',
181 'password': '',
182 'params': {},
183 'query': {},
184 'relative': False
185 },
186 "git:///path/example.git": {
187 'uri': 'git:///path/example.git',
188 'scheme': 'git',
189 'hostname': '',
190 'port': None,
191 'hostport': '',
192 'path': '/path/example.git',
193 'userinfo': '',
194 'userinfo': '',
195 'username': '',
196 'password': '',
197 'params': {},
198 'query': {},
199 'relative': False
200 },
201 "git:path/example.git": {
202 'uri': 'git:path/example.git',
203 'scheme': 'git',
204 'hostname': '',
205 'port': None,
206 'hostport': '',
207 'path': 'path/example.git',
208 'userinfo': '',
209 'userinfo': '',
210 'username': '',
211 'password': '',
212 'params': {},
213 'query': {},
214 'relative': True
215 },
216 "git://example.net/path/example.git": {
217 'uri': 'git://example.net/path/example.git',
218 'scheme': 'git',
219 'hostname': 'example.net',
220 'port': None,
221 'hostport': 'example.net',
222 'path': '/path/example.git',
223 'userinfo': '',
224 'userinfo': '',
225 'username': '',
226 'password': '',
227 'params': {},
228 'query': {},
229 'relative': False
230 }
231 }
232
233 def test_uri(self):
234 for test_uri, ref in self.test_uris.items():
235 uri = URI(test_uri)
236
237 self.assertEqual(str(uri), ref['uri'])
238
239 # expected attributes
240 self.assertEqual(uri.scheme, ref['scheme'])
241
242 self.assertEqual(uri.userinfo, ref['userinfo'])
243 self.assertEqual(uri.username, ref['username'])
244 self.assertEqual(uri.password, ref['password'])
245
246 self.assertEqual(uri.hostname, ref['hostname'])
247 self.assertEqual(uri.port, ref['port'])
248 self.assertEqual(uri.hostport, ref['hostport'])
249
250 self.assertEqual(uri.path, ref['path'])
251 self.assertEqual(uri.params, ref['params'])
252
253 self.assertEqual(uri.relative, ref['relative'])
254
255 def test_dict(self):
256 for test in self.test_uris.values():
257 uri = URI()
258
259 self.assertEqual(uri.scheme, '')
260 self.assertEqual(uri.userinfo, '')
261 self.assertEqual(uri.username, '')
262 self.assertEqual(uri.password, '')
263 self.assertEqual(uri.hostname, '')
264 self.assertEqual(uri.port, None)
265 self.assertEqual(uri.path, '')
266 self.assertEqual(uri.params, {})
267
268
269 uri.scheme = test['scheme']
270 self.assertEqual(uri.scheme, test['scheme'])
271
272 uri.userinfo = test['userinfo']
273 self.assertEqual(uri.userinfo, test['userinfo'])
274 self.assertEqual(uri.username, test['username'])
275 self.assertEqual(uri.password, test['password'])
276
277 # make sure changing the values doesn't do anything unexpected
278 uri.username = 'changeme'
279 self.assertEqual(uri.username, 'changeme')
280 self.assertEqual(uri.password, test['password'])
281 uri.password = 'insecure'
282 self.assertEqual(uri.username, 'changeme')
283 self.assertEqual(uri.password, 'insecure')
284
285 # reset back after our trickery
286 uri.userinfo = test['userinfo']
287 self.assertEqual(uri.userinfo, test['userinfo'])
288 self.assertEqual(uri.username, test['username'])
289 self.assertEqual(uri.password, test['password'])
290
291 uri.hostname = test['hostname']
292 self.assertEqual(uri.hostname, test['hostname'])
293 self.assertEqual(uri.hostport, test['hostname'])
294
295 uri.port = test['port']
296 self.assertEqual(uri.port, test['port'])
297 self.assertEqual(uri.hostport, test['hostport'])
298
299 uri.path = test['path']
300 self.assertEqual(uri.path, test['path'])
301
302 uri.params = test['params']
303 self.assertEqual(uri.params, test['params'])
304
305 uri.query = test['query']
306 self.assertEqual(uri.query, test['query'])
307
308 self.assertEqual(str(uri), test['uri'])
309
310 uri.params = {}
311 self.assertEqual(uri.params, {})
312 self.assertEqual(str(uri), (str(uri).split(";"))[0])
313
314class FetcherTest(unittest.TestCase):
315
316 def setUp(self):
317 self.d = bb.data.init()
318 self.tempdir = tempfile.mkdtemp()
319 self.dldir = os.path.join(self.tempdir, "download")
320 os.mkdir(self.dldir)
321 self.d.setVar("DL_DIR", self.dldir)
322 self.unpackdir = os.path.join(self.tempdir, "unpacked")
323 os.mkdir(self.unpackdir)
324 persistdir = os.path.join(self.tempdir, "persistdata")
325 self.d.setVar("PERSISTENT_DIR", persistdir)
326
327 def tearDown(self):
328 bb.utils.prunedir(self.tempdir)
329
330class MirrorUriTest(FetcherTest):
331
332 replaceuris = {
333 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
334 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
335 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
336 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
337 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
338 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
339 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
340 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
341 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
342 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
343 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
344 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
345 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
346 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
347 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
348 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
349 ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
350 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
351 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
352 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
353 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
354 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
355 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
356 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
357 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
358 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
359 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
360 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
361
362 #Renaming files doesn't work
363 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
364 #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
365 }
366
367 mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
368 "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
369 "https://.*/.* file:///someotherpath/downloads/ \n" \
370 "http://.*/.* file:///someotherpath/downloads/ \n"
371
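    # Illustrative note (not part of the original tests): mirror_from_string is
    # expected to split MIRRORS-style text like the string above into a list of
    # (regex, replacement) pairs, roughly:
    #   bb.fetch2.mirror_from_string("http://.*/.* file:///somepath/downloads/")
    #   # -> [['http://.*/.*', 'file:///somepath/downloads/']]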
372 def test_urireplace(self):
373 for k, v in self.replaceuris.items():
374 ud = bb.fetch.FetchData(k[0], self.d)
375 ud.setup_localpath(self.d)
376 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
377 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
378 self.assertEqual([v], newuris)
379
380 def test_urilist1(self):
381 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
382 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
383 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
384 self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz', 'file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
385
386 def test_urilist2(self):
387 # Catch https:// -> files:// bug
388 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
389 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
390 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
391 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
392
393
394class FetcherLocalTest(FetcherTest):
395 def setUp(self):
396 def touch(fn):
397 with file(fn, 'a'):
398 os.utime(fn, None)
399
400 super(FetcherLocalTest, self).setUp()
401 self.localsrcdir = os.path.join(self.tempdir, 'localsrc')
402 os.makedirs(self.localsrcdir)
403 touch(os.path.join(self.localsrcdir, 'a'))
404 touch(os.path.join(self.localsrcdir, 'b'))
405 os.makedirs(os.path.join(self.localsrcdir, 'dir'))
406 touch(os.path.join(self.localsrcdir, 'dir', 'c'))
407 touch(os.path.join(self.localsrcdir, 'dir', 'd'))
408 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
409 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
410 self.d.setVar("FILESPATH", self.localsrcdir)
411
412 def fetchUnpack(self, uris):
413 fetcher = bb.fetch.Fetch(uris, self.d)
414 fetcher.download()
415 fetcher.unpack(self.unpackdir)
416 flst = []
417 for root, dirs, files in os.walk(self.unpackdir):
418 for f in files:
419 flst.append(os.path.relpath(os.path.join(root, f), self.unpackdir))
420 flst.sort()
421 return flst
422
423 def test_local(self):
424 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
425 self.assertEqual(tree, ['a', 'dir/c'])
426
427 def test_local_wildcard(self):
428 tree = self.fetchUnpack(['file://a', 'file://dir/*'])
429 # FIXME: this is broken - it should return ['a', 'dir/c', 'dir/d', 'dir/subdir/e']
430 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6128
431 self.assertEqual(tree, ['a', 'b', 'dir/c', 'dir/d', 'dir/subdir/e'])
432
433 def test_local_dir(self):
434 tree = self.fetchUnpack(['file://a', 'file://dir'])
435 self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
436
437 def test_local_subdir(self):
438 tree = self.fetchUnpack(['file://dir/subdir'])
439 # FIXME: this is broken - it should return ['dir/subdir/e']
440 # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6129
441 self.assertEqual(tree, ['subdir/e'])
442
443 def test_local_subdir_file(self):
444 tree = self.fetchUnpack(['file://dir/subdir/e'])
445 self.assertEqual(tree, ['dir/subdir/e'])
446
447 def test_local_subdirparam(self):
448 tree = self.fetchUnpack(['file://a;subdir=bar'])
449 self.assertEqual(tree, ['bar/a'])
450
451 def test_local_deepsubdirparam(self):
452 tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
453 self.assertEqual(tree, ['bar/dir/subdir/e'])
454
455class FetcherNetworkTest(FetcherTest):
456
457 if os.environ.get("BB_SKIP_NETTESTS") == "yes":
458 print("Unset BB_SKIP_NETTESTS to run network tests")
459 else:
460 def test_fetch(self):
461 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
462 fetcher.download()
463 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
464 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
465 self.d.setVar("BB_NO_NETWORK", "1")
466 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
467 fetcher.download()
468 fetcher.unpack(self.unpackdir)
469 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
470 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)
471
472 def test_fetch_mirror(self):
473 self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
474 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
475 fetcher.download()
476 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
477
478 def test_fetch_premirror(self):
479 self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
480 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
481 fetcher.download()
482 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
483
484 def gitfetcher(self, url1, url2):
485 def checkrevision(self, fetcher):
486 fetcher.unpack(self.unpackdir)
487 revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip()
488 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
489
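        # Illustrative note: BB_GENERATE_MIRROR_TARBALLS makes the git fetcher write
        # a git2_*.tar.gz mirror tarball into DL_DIR alongside the clone, which is
        # what allows the second fetch below to succeed with BB_NO_NETWORK set.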
490 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
491 self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
492 fetcher = bb.fetch.Fetch([url1], self.d)
493 fetcher.download()
494 checkrevision(self, fetcher)
495 # Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
496 bb.utils.prunedir(self.dldir + "/git2/")
497 bb.utils.prunedir(self.unpackdir)
498 self.d.setVar("BB_NO_NETWORK", "1")
499 fetcher = bb.fetch.Fetch([url2], self.d)
500 fetcher.download()
501 checkrevision(self, fetcher)
502
503 def test_gitfetch(self):
504 url1 = url2 = "git://git.openembedded.org/bitbake"
505 self.gitfetcher(url1, url2)
506
507 def test_gitfetch_goodsrcrev(self):
508 # SRCREV is set and matches the rev= parameter
509 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
510 self.gitfetcher(url1, url2)
511
512 def test_gitfetch_badsrcrev(self):
513 # SRCREV is set but does not match rev= parameter
514 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5"
515 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
516
517 def test_gitfetch_tagandrev(self):
518 # Specifying both rev= and tag= parameters is not allowed and should raise an error
519 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
520 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
521
522 def test_gitfetch_premirror(self):
523 url1 = "git://git.openembedded.org/bitbake"
524 url2 = "git://someserver.org/bitbake"
525 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
526 self.gitfetcher(url1, url2)
527
528 def test_gitfetch_premirror2(self):
529 url1 = url2 = "git://someserver.org/bitbake"
530 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
531 self.gitfetcher(url1, url2)
532
533 def test_gitfetch_premirror3(self):
534 realurl = "git://git.openembedded.org/bitbake"
535 dummyurl = "git://someserver.org/bitbake"
536 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
537 os.chdir(self.tempdir)
538 bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
539 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
540 self.gitfetcher(dummyurl, dummyurl)
541
542 def test_git_submodule(self):
543 fetcher = bb.fetch.Fetch(["gitsm://git.yoctoproject.org/git-submodule-test;rev=f12e57f2edf0aa534cf1616fa983d165a92b0842"], self.d)
544 fetcher.download()
545 # Previous cwd has been deleted
546 os.chdir(os.path.dirname(self.unpackdir))
547 fetcher.unpack(self.unpackdir)
548
549class URLHandle(unittest.TestCase):
550
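    # Each value is the tuple (scheme, hostname, path, username, password, params)
    # that decodeurl is expected to produce for the URL used as the key, and that
    # encodeurl should turn back into that URL.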
551 datatable = {
552 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
553 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
554 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
555 "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'})
556 }
557
558 def test_decodeurl(self):
559 for k, v in self.datatable.items():
560 result = bb.fetch.decodeurl(k)
561 self.assertEqual(result, v)
562
563 def test_encodeurl(self):
564 for k, v in self.datatable.items():
565 result = bb.fetch.encodeurl(v)
566 self.assertEqual(result, k)
567
568
569
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
new file mode 100644
index 0000000000..677b3872ba
--- /dev/null
+++ b/bitbake/lib/bb/tests/utils.py
@@ -0,0 +1,103 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Tests for utils.py
5#
6# Copyright (C) 2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21
22import unittest
23import bb
24import os
25
26class VerCmpString(unittest.TestCase):
27
28 def test_vercmpstring(self):
29 result = bb.utils.vercmp_string('1', '2')
30 self.assertTrue(result < 0)
31 result = bb.utils.vercmp_string('2', '1')
32 self.assertTrue(result > 0)
33 result = bb.utils.vercmp_string('1', '1.0')
34 self.assertTrue(result < 0)
35 result = bb.utils.vercmp_string('1', '1.1')
36 self.assertTrue(result < 0)
37 result = bb.utils.vercmp_string('1.1', '1_p2')
38 self.assertTrue(result < 0)
39
40 def test_explode_dep_versions(self):
41 correctresult = {"foo" : ["= 1.10"]}
42 result = bb.utils.explode_dep_versions2("foo (= 1.10)")
43 self.assertEqual(result, correctresult)
44 result = bb.utils.explode_dep_versions2("foo (=1.10)")
45 self.assertEqual(result, correctresult)
46 result = bb.utils.explode_dep_versions2("foo ( = 1.10)")
47 self.assertEqual(result, correctresult)
48 result = bb.utils.explode_dep_versions2("foo ( =1.10)")
49 self.assertEqual(result, correctresult)
50 result = bb.utils.explode_dep_versions2("foo ( = 1.10 )")
51 self.assertEqual(result, correctresult)
52 result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
53 self.assertEqual(result, correctresult)
54
55 def test_vercmp_string_op(self):
56 compareops = [('1', '1', '=', True),
57 ('1', '1', '==', True),
58 ('1', '1', '!=', False),
59 ('1', '1', '>', False),
60 ('1', '1', '<', False),
61 ('1', '1', '>=', True),
62 ('1', '1', '<=', True),
63 ('1', '0', '=', False),
64 ('1', '0', '==', False),
65 ('1', '0', '!=', True),
66 ('1', '0', '>', True),
67 ('1', '0', '<', False),
68 ('1', '0', '>>', True),
69 ('1', '0', '<<', False),
70 ('1', '0', '>=', True),
71 ('1', '0', '<=', False),
72 ('0', '1', '=', False),
73 ('0', '1', '==', False),
74 ('0', '1', '!=', True),
75 ('0', '1', '>', False),
76 ('0', '1', '<', True),
77 ('0', '1', '>>', False),
78 ('0', '1', '<<', True),
79 ('0', '1', '>=', False),
80 ('0', '1', '<=', True)]
81
82 for arg1, arg2, op, correctresult in compareops:
83 result = bb.utils.vercmp_string_op(arg1, arg2, op)
84 self.assertEqual(result, correctresult, 'vercmp_string_op("%s", "%s", "%s") != %s' % (arg1, arg2, op, correctresult))
85
86 # Check that clearly invalid operator raises an exception
87 self.assertRaises(bb.utils.VersionStringException, bb.utils.vercmp_string_op, '0', '0', '$')
88
89
90class Path(unittest.TestCase):
91 def test_unsafe_delete_path(self):
92 checkitems = [('/', True),
93 ('//', True),
94 ('///', True),
95 (os.getcwd().count(os.sep) * ('..' + os.sep), True),
96 (os.environ.get('HOME', '/home/test'), True),
97 ('/home/someone', True),
98 ('/home/other/', True),
99 ('/home/other/subdir', False),
100 ('', False)]
101 for arg1, correctresult in checkitems:
102 result = bb.utils._check_unsafe_delete_path(arg1)
103 self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
new file mode 100644
index 0000000000..6bcbd47ab3
--- /dev/null
+++ b/bitbake/lib/bb/tinfoil.py
@@ -0,0 +1,99 @@
1# tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities
2#
3# Copyright (C) 2012 Intel Corporation
4# Copyright (C) 2011 Mentor Graphics Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import logging
20import warnings
21import os
22import sys
23
24import bb.cache
25import bb.cooker
26import bb.providers
27import bb.utils
28from bb.cooker import state, BBCooker, CookerFeatures
29from bb.cookerdata import CookerConfiguration, ConfigParameters
30import bb.fetch2
31
32class Tinfoil:
33 def __init__(self, output=sys.stdout, tracking=False):
34 # Needed to avoid deprecation warnings with python 2.6
35 warnings.filterwarnings("ignore", category=DeprecationWarning)
36
37 # Set up logging
38 self.logger = logging.getLogger('BitBake')
39 console = logging.StreamHandler(output)
40 bb.msg.addDefaultlogFilter(console)
41 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
42 if output.isatty():
43 format.enable_color()
44 console.setFormatter(format)
45 self.logger.addHandler(console)
46
47 self.config = CookerConfiguration()
48 configparams = TinfoilConfigParameters(parse_only=True)
49 self.config.setConfigParameters(configparams)
50 self.config.setServerRegIdleCallback(self.register_idle_function)
51 features = []
52 if tracking:
53 features.append(CookerFeatures.BASEDATASTORE_TRACKING)
54 self.cooker = BBCooker(self.config, features)
55 self.config_data = self.cooker.data
56 bb.providers.logger.setLevel(logging.ERROR)
57 self.cooker_data = None
58
59 def register_idle_function(self, function, data):
60 pass
61
62 def parseRecipes(self):
63 sys.stderr.write("Parsing recipes..")
64 self.logger.setLevel(logging.WARNING)
65
66 try:
67 while self.cooker.state in (state.initial, state.parsing):
68 self.cooker.updateCache()
69 except KeyboardInterrupt:
70 self.cooker.shutdown()
71 self.cooker.updateCache()
72 sys.exit(2)
73
74 self.logger.setLevel(logging.INFO)
75 sys.stderr.write("done.\n")
76
77 self.cooker_data = self.cooker.recipecache
78
79 def prepare(self, config_only = False):
80 if not self.cooker_data:
81 if config_only:
82 self.cooker.parseConfiguration()
83 self.cooker_data = self.cooker.recipecache
84 else:
85 self.parseRecipes()
86
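# A minimal usage sketch (illustrative only, not part of the original module),
# assuming a configured build environment:
#
#   import bb.tinfoil
#   tinfoil = bb.tinfoil.Tinfoil()
#   tinfoil.prepare(config_only=True)
#   print tinfoil.config_data.getVar("MACHINE", True)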
87class TinfoilConfigParameters(ConfigParameters):
88
89 def __init__(self, **options):
90 self.initial_options = options
91 super(TinfoilConfigParameters, self).__init__()
92
93 def parseCommandLine(self):
94 class DummyOptions:
95 def __init__(self, initial_options):
96 for key, val in initial_options.items():
97 setattr(self, key, val)
98
99 return DummyOptions(self.initial_options), None
diff --git a/bitbake/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
new file mode 100644
index 0000000000..a4805ed028
--- /dev/null
+++ b/bitbake/lib/bb/ui/__init__.py
@@ -0,0 +1,17 @@
1#
2# BitBake UI Implementation
3#
4# Copyright (C) 2006-2007 Richard Purdie
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
new file mode 100644
index 0000000000..8ca44a1a1d
--- /dev/null
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -0,0 +1,1023 @@
1#
2# BitBake ToasterUI Implementation
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import datetime
20import sys
21import bb
22import re
23import ast
24import os
import logging
25os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
26
27import toaster.toastermain.settings as toaster_django_settings
28from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
29from toaster.orm.models import Target_Image_File
30from toaster.orm.models import Variable, VariableHistory
31from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
32from toaster.orm.models import Task_Dependency, Package_Dependency
33from toaster.orm.models import Recipe_Dependency
34from bb.msg import BBLogFormatter as format
35logger = logging.getLogger("BitBake")

36class NotExisting(Exception):
37 pass
38
39class ORMWrapper(object):
40 """ This class creates the dictionaries needed to store information in the database
41 following the format defined by the Django models. It is also used to save this
42 information in the database.
43 """
44
45 def __init__(self):
46 pass
47
48
49 def create_build_object(self, build_info, brbe):
50 assert 'machine' in build_info
51 assert 'distro' in build_info
52 assert 'distro_version' in build_info
53 assert 'started_on' in build_info
54 assert 'cooker_log_path' in build_info
55 assert 'build_name' in build_info
56 assert 'bitbake_version' in build_info
57
58 build = Build.objects.create(
59 machine=build_info['machine'],
60 distro=build_info['distro'],
61 distro_version=build_info['distro_version'],
62 started_on=build_info['started_on'],
63 completed_on=build_info['started_on'],
64 cooker_log_path=build_info['cooker_log_path'],
65 build_name=build_info['build_name'],
66 bitbake_version=build_info['bitbake_version'])
67
68 if brbe is not None:
69 from bldcontrol.models import BuildEnvironment, BuildRequest
70 br, be = brbe.split(":")
71 buildrequest = BuildRequest.objects.get(pk = br)
72 build.project_id = buildrequest.project_id
73 build.save()
74
75 return build
76
77 def create_target_objects(self, target_info):
78 assert 'build' in target_info
79 assert 'targets' in target_info
80
81 targets = []
82 for tgt_name in target_info['targets']:
83 tgt_object = Target.objects.create( build = target_info['build'],
84 target = tgt_name,
85 is_image = False,
86 );
87 targets.append(tgt_object)
88 return targets
89
90 def update_build_object(self, build, errors, warnings, taskfailures):
91 assert isinstance(build,Build)
92 assert isinstance(errors, int)
93 assert isinstance(warnings, int)
94
95 outcome = Build.SUCCEEDED
96 if errors or taskfailures:
97 outcome = Build.FAILED
98
99 build.completed_on = datetime.datetime.now()
100 build.timespent = int((build.completed_on - build.started_on).total_seconds())
101 build.errors_no = errors
102 build.warnings_no = warnings
103 build.outcome = outcome
104 build.save()
105
106 def update_target_object(self, target, license_manifest_path):
107
108 target.license_manifest_path = license_manifest_path
109 target.save()
110
111 def get_update_task_object(self, task_information, must_exist = False):
112 assert 'build' in task_information
113 assert 'recipe' in task_information
114 assert 'task_name' in task_information
115
116 task_object, created = Task.objects.get_or_create(
117 build=task_information['build'],
118 recipe=task_information['recipe'],
119 task_name=task_information['task_name'],
120 )
121
122 if must_exist and created:
123 task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
124 task_object.delete()
125 raise NotExisting("Task object created when expected to exist", task_information)
126
127 for v in vars(task_object):
128 if v in task_information.keys():
129 vars(task_object)[v] = task_information[v]
130
131 # update setscene-related information
132 if 1 == Task.objects.related_setscene(task_object).count():
133 if task_object.outcome == Task.OUTCOME_COVERED:
134 task_object.outcome = Task.OUTCOME_CACHED
135
136 outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
137 recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
138 if outcome_task_setscene == Task.OUTCOME_SUCCESS:
139 task_object.sstate_result = Task.SSTATE_RESTORED
140 elif outcome_task_setscene == Task.OUTCOME_FAILED:
141 task_object.sstate_result = Task.SSTATE_FAILED
142
143 # record the duration if we have both a start time and an end time
144 if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
145 duration = task_information['end_time'] - task_information['start_time']
146 task_object.elapsed_time = duration
147
148 task_object.save()
149 return task_object
150
151
152 def get_update_recipe_object(self, recipe_information, must_exist = False):
153 assert 'layer_version' in recipe_information
154 assert 'file_path' in recipe_information
155
156
157 recipe_object, created = Recipe.objects.get_or_create(
158 layer_version=recipe_information['layer_version'],
159 file_path=recipe_information['file_path'])
160
161 if must_exist and created:
162 recipe_object.delete()
163 raise NotExisting("Recipe object created when expected to exist", recipe_information)
164
165 for v in vars(recipe_object):
166 if v in recipe_information.keys():
167 vars(recipe_object)[v] = recipe_information[v]
168
169 recipe_object.save()
170
171 return recipe_object
172
173 def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
174 assert isinstance(build_obj, Build)
175 assert isinstance(layer_obj, Layer)
176 assert 'branch' in layer_version_information
177 assert 'commit' in layer_version_information
178 assert 'priority' in layer_version_information
179
180 layer_version_object, created = Layer_Version.objects.get_or_create(
181 build = build_obj,
182 layer = layer_obj,
183 branch = layer_version_information['branch'],
184 commit = layer_version_information['commit'],
185 priority = layer_version_information['priority']
186 )
187
188 return layer_version_object
189
190 def get_update_layer_object(self, layer_information):
191 assert 'name' in layer_information
192 assert 'local_path' in layer_information
193 assert 'layer_index_url' in layer_information
194
195 layer_object, created = Layer.objects.get_or_create(
196 name=layer_information['name'],
197 local_path=layer_information['local_path'],
198 layer_index_url=layer_information['layer_index_url'])
199
200 return layer_object
201
202 def save_target_file_information(self, build_obj, target_obj, filedata):
203 assert isinstance(build_obj, Build)
204 assert isinstance(target_obj, Target)
205 dirs = filedata['dirs']
206 files = filedata['files']
207 syms = filedata['syms']
208
209 # we insert directories, ordered by name depth
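            # (note) inserting parents before children means the parent_obj lookup
            # below always finds an existing Target_File row for well-formed input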
210 for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
211 (user, group, size) = d[1:4]
212 permission = d[0][1:]
213 path = d[4].lstrip(".")
214 if len(path) == 0:
215 # we create the root directory as a special case
216 path = "/"
217 tf_obj = Target_File.objects.create(
218 target = target_obj,
219 path = path,
220 size = size,
221 inodetype = Target_File.ITYPE_DIRECTORY,
222 permission = permission,
223 owner = user,
224 group = group,
225 )
226 tf_obj.directory = tf_obj
227 tf_obj.save()
228 continue
229 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
230 if len(parent_path) == 0:
231 parent_path = "/"
232 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
233 tf_obj = Target_File.objects.create(
234 target = target_obj,
235 path = path,
236 size = size,
237 inodetype = Target_File.ITYPE_DIRECTORY,
238 permission = permission,
239 owner = user,
240 group = group,
241 directory = parent_obj)
242
243
244 # we insert files
245 for d in files:
246 (user, group, size) = d[1:4]
247 permission = d[0][1:]
248 path = d[4].lstrip(".")
249 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
250 inodetype = Target_File.ITYPE_REGULAR
251 if d[0].startswith('b'):
252 inodetype = Target_File.ITYPE_BLOCK
253 if d[0].startswith('c'):
254 inodetype = Target_File.ITYPE_CHARACTER
255 if d[0].startswith('p'):
256 inodetype = Target_File.ITYPE_FIFO
257
258 tf_obj = Target_File.objects.create(
259 target = target_obj,
260 path = path,
261 size = size,
262 inodetype = inodetype,
263 permission = permission,
264 owner = user,
265 group = group)
266 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
267 tf_obj.directory = parent_obj
268 tf_obj.save()
269
270 # we insert symlinks
271 for d in syms:
272 (user, group, size) = d[1:4]
273 permission = d[0][1:]
274 path = d[4].lstrip(".")
275 filetarget_path = d[6]
276
277 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
278 if not filetarget_path.startswith("/"):
279 # we have a relative path, get a normalized absolute one
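                # e.g. a symlink at /usr/bin/foo pointing at ../lib/bar is resolved
                # to /usr/lib/bar once the ".." components are collapsed below
                # (illustrative example)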
280 filetarget_path = parent_path + "/" + filetarget_path
281 fcp = filetarget_path.split("/")
282 fcpl = []
283 for i in fcp:
284 if i == "..":
285 fcpl.pop()
286 else:
287 fcpl.append(i)
288 filetarget_path = "/".join(fcpl)
289
290 try:
291 filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
292 except Exception as e:
293 # we might have an invalid link; there is no way to detect this, so just set it to None
294 filetarget_obj = None
295
296 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
297
298 tf_obj = Target_File.objects.create(
299 target = target_obj,
300 path = path,
301 size = size,
302 inodetype = Target_File.ITYPE_SYMLINK,
303 permission = permission,
304 owner = user,
305 group = group,
306 directory = parent_obj,
307 sym_target = filetarget_obj)
308
309
310 def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes):
311 assert isinstance(build_obj, Build)
312 assert isinstance(target_obj, Target)
313
314 errormsg = ""
315 for p in packagedict:
316 searchname = p
317 if 'OPKGN' in pkgpnmap[p].keys():
318 searchname = pkgpnmap[p]['OPKGN']
319
320 packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
321 if created:
322 # package was not built in the current build, but
323 # fill in everything we can from the runtime-reverse package data
324 try:
325 packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
326 packagedict[p]['object'].version = pkgpnmap[p]['PV']
327 packagedict[p]['object'].installed_name = p
328 packagedict[p]['object'].revision = pkgpnmap[p]['PR']
329 packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
330 packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
331 packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
332 packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
333 packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
334
335 # no files recorded for this package, so save files info
336 for targetpath in pkgpnmap[p]['FILES_INFO']:
337 targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
338 Package_File.objects.create( package = packagedict[p]['object'],
339 path = targetpath,
340 size = targetfilesize)
341 except KeyError as e:
342 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
343
344 # save disk installed size
345 packagedict[p]['object'].installed_size = packagedict[p]['size']
346 packagedict[p]['object'].save()
347
348 Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
349
350 for p in packagedict:
351 for (px,deptype) in packagedict[p]['depends']:
352 if deptype == 'depends':
353 tdeptype = Package_Dependency.TYPE_TRDEPENDS
354 elif deptype == 'recommends':
355 tdeptype = Package_Dependency.TYPE_TRECOMMENDS
356
357 Package_Dependency.objects.create( package = packagedict[p]['object'],
358 depends_on = packagedict[px]['object'],
359 dep_type = tdeptype,
360 target = target_obj);
361
362 if (len(errormsg) > 0):
363 raise Exception(errormsg)
364
365 def save_target_image_file_information(self, target_obj, file_name, file_size):
366 target_image_file = Target_Image_File.objects.create( target = target_obj,
367 file_name = file_name,
368 file_size = file_size)
369 target_image_file.save()
370
371 def create_logmessage(self, log_information):
372 assert 'build' in log_information
373 assert 'level' in log_information
374 assert 'message' in log_information
375
376 log_object = LogMessage.objects.create(
377 build = log_information['build'],
378 level = log_information['level'],
379 message = log_information['message'])
380
381 for v in vars(log_object):
382 if v in log_information.keys():
383 vars(log_object)[v] = log_information[v]
384
385 return log_object.save()
386
387
388 def save_build_package_information(self, build_obj, package_info, recipes):
389 assert isinstance(build_obj, Build)
390
391 # create and save the object
392 pname = package_info['PKG']
393 if 'OPKGN' in package_info.keys():
394 pname = package_info['OPKGN']
395
396 bp_object, created = Package.objects.get_or_create( build = build_obj,
397 name = pname )
398
399 bp_object.installed_name = package_info['PKG']
400 bp_object.recipe = recipes[package_info['PN']]
401 bp_object.version = package_info['PKGV']
402 bp_object.revision = package_info['PKGR']
403 bp_object.summary = package_info['SUMMARY']
404 bp_object.description = package_info['DESCRIPTION']
405 bp_object.size = int(package_info['PKGSIZE'])
406 bp_object.section = package_info['SECTION']
407 bp_object.license = package_info['LICENSE']
408 bp_object.save()
409
410 # save any attached file information
411 for path in package_info['FILES_INFO']:
412 fo = Package_File.objects.create( package = bp_object,
413 path = path,
414 size = package_info['FILES_INFO'][path] )
415
416 def _po_byname(p):
417 pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
418 if created:
419 pkg.size = -1
420 pkg.save()
421 return pkg
422
423 # save soft dependency information
424 if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
425 for p in bb.utils.explode_deps(package_info['RDEPENDS']):
426 Package_Dependency.objects.get_or_create( package = bp_object,
427 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
428 if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
429 for p in bb.utils.explode_deps(package_info['RPROVIDES']):
430 Package_Dependency.objects.get_or_create( package = bp_object,
431 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
432 if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
433 for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
434 Package_Dependency.objects.get_or_create( package = bp_object,
435 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
436 if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
437 for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
438 Package_Dependency.objects.get_or_create( package = bp_object,
439 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
440 if 'RREPLACES' in package_info and package_info['RREPLACES']:
441 for p in bb.utils.explode_deps(package_info['RREPLACES']):
442 Package_Dependency.objects.get_or_create( package = bp_object,
443 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
444 if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
445 for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
446 Package_Dependency.objects.get_or_create( package = bp_object,
447 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
448
449 return bp_object
450
451 def save_build_variables(self, build_obj, vardump):
452 assert isinstance(build_obj, Build)
453
454 helptext_objects = []
455
456 for k in vardump:
457 desc = vardump[k]['doc'];
458 if desc is None:
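                # fall back to the documentation of the "root" variable, i.e. the
                # upper-case words of the name joined back together; e.g. (illustrative)
                # a key like FOO_append_machine would inherit the doc of FOO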
459 var_words = [word for word in k.split('_')]
460 root_var = "_".join([word for word in var_words if word.isupper()])
461 if root_var and root_var != k and root_var in vardump:
462 desc = vardump[root_var]['doc']
463 if desc is None:
464 desc = ''
465 if desc:
466 helptext_obj = HelpText.objects.create(build=build_obj,
467 area=HelpText.VARIABLE,
468 key=k,
469 text=desc)
470 if not bool(vardump[k]['func']):
471 value = vardump[k]['v'];
472 if value is None:
473 value = ''
474 variable_obj = Variable.objects.create( build = build_obj,
475 variable_name = k,
476 variable_value = value,
477 description = desc)
478 for vh in vardump[k]['history']:
479 if not 'documentation.conf' in vh['file']:
480 VariableHistory.objects.create( variable = variable_obj,
481 file_name = vh['file'],
482 line_number = vh['line'],
483 operation = vh['op'])
484
485class MockEvent: pass # sometimes we mock an event, declare it here
486
487class BuildInfoHelper(object):
488 """ This class gathers the build information from the server and sends it
489 towards the ORM wrapper for storing in the database
490 It is instantiated once per build
491 Keeps in memory all data that needs matching before writing it to the database
492 """
493
494
495 def __init__(self, server, has_build_history = False):
496 self._configure_django()
497 self.internal_state = {}
498 self.internal_state['taskdata'] = {}
499 self.task_order = 0
500 self.server = server
501 self.orm_wrapper = ORMWrapper()
502 self.has_build_history = has_build_history
503 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
504 self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
505
506
507 def _configure_django(self):
508 # Add toaster to sys path for importing modules
509 sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
510
511 ###################
512 ## methods to convert event/external info into objects that the ORM layer uses
513
514
515 def _get_build_information(self):
516 build_info = {}
517 # Generate an identifier for each new build
518
519 build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
520 build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
521 build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
522 build_info['started_on'] = datetime.datetime.now()
523 build_info['completed_on'] = datetime.datetime.now()
524 build_info['cooker_log_path'] = self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
525 build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
526 build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
527
528 return build_info
529
530 def _get_task_information(self, event, recipe):
531 assert 'taskname' in vars(event)
532
533 task_information = {}
534 task_information['build'] = self.internal_state['build']
535 task_information['outcome'] = Task.OUTCOME_NA
536 task_information['recipe'] = recipe
537 task_information['task_name'] = event.taskname
538 try:
539 # some tasks don't come with a hash, and that's ok
540 task_information['sstate_checksum'] = event.taskhash
541 except AttributeError:
542 pass
543 return task_information
544
545 def _get_layer_version_for_path(self, path):
546 assert path.startswith("/")
547 assert 'build' in self.internal_state
548
549 def _slkey(layer_version):
550 assert isinstance(layer_version, Layer_Version)
551 return len(layer_version.layer.local_path)
552
553 # Heuristics: we always match recipe to the deepest layer path that
554 # we can match to the recipe file path
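        # e.g. (illustrative) a recipe under /layers/meta-foo/recipes-bar/ is
        # attributed to the layer rooted at /layers/meta-foo rather than to a
        # layer rooted at /layers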
555 for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
556 if (path.startswith(bl.layer.local_path)):
557 return bl
558
559 # if we get here, we didn't read the layers correctly; mock up a new layer
560 unknown_layer, created = Layer.objects.get_or_create(name="unknown", local_path="/", layer_index_url="")
561 unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
562
563 return unknown_layer_version_obj
564
565 def _get_recipe_information_from_taskfile(self, taskfile):
566 localfilepath = taskfile.split(":")[-1]
567 layer_version_obj = self._get_layer_version_for_path(localfilepath)
568
569 recipe_info = {}
570 recipe_info['layer_version'] = layer_version_obj
571 recipe_info['file_path'] = taskfile
572
573 return recipe_info
574
575 def _get_path_information(self, task_object):
576 assert isinstance(task_object, Task)
577 build_stats_format = "{tmpdir}/buildstats/{target}-{machine}/{buildname}/{package}/"
578 build_stats_path = []
579
580 for t in self.internal_state['targets']:
581 target = t.target
582 machine = self.internal_state['build'].machine
583 buildname = self.internal_state['build'].build_name
584 pe, pv = task_object.recipe.version.split(":",1)
585 if len(pe) > 0:
586 package = task_object.recipe.name + "-" + pe + "_" + pv
587 else:
588 package = task_object.recipe.name + "-" + pv
589
590 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir, target=target,
591 machine=machine, buildname=buildname,
592 package=package))
593
594 return build_stats_path
595
596 def _remove_redundant(self, string):
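        # de-duplicate whitespace separated tokens and return them sorted,
        # e.g. (illustrative) "b a b" -> "a b"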
597 ret = []
598 for i in string.split():
599 if i not in ret:
600 ret.append(i)
601 return " ".join(sorted(ret))
602
603
604 ################################
605 ## externally available methods to store information
606
607 def store_layer_info(self, event):
608 assert '_localdata' in vars(event)
609 layerinfos = event._localdata
610 self.internal_state['lvs'] = {}
611 for layer in layerinfos:
612 self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']
613
614
615 def store_started_build(self, event):
616 assert '_pkgs' in vars(event)
617 build_information = self._get_build_information()
618
619 build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)
620
621 self.internal_state['build'] = build_obj
622
623 # save layer version information for this build
624 if not 'lvs' in self.internal_state:
625 logger.error("Layer version information not found; check if the bitbake server was configured to inherit toaster.bbclass.")
626 else:
627 for layer_obj in self.internal_state['lvs']:
628 self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
629
630 del self.internal_state['lvs']
631
632 # create target information
633 target_information = {}
634 target_information['targets'] = event._pkgs
635 target_information['build'] = build_obj
636
637 self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
638
639 # Save build configuration
640 data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
641 self.orm_wrapper.save_build_variables(build_obj, data)
642
643 return self.brbe
644
645
646
647 def update_target_image_file(self, event):
648 image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
649 for t in self.internal_state['targets']:
650 if t.is_image == True:
651 output_files = list(event._localdata.viewkeys())
652 for output in output_files:
653 if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
654 self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output])
655
656 def update_build_information(self, event, errors, warnings, taskfailures):
657 if 'build' in self.internal_state:
658 self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
659
660
661 def store_license_manifest_path(self, event):
662 deploy_dir = event._localdata['deploy_dir']
663 image_name = event._localdata['image_name']
664 path = deploy_dir + "/licenses/" + image_name + "/"
665 for target in self.internal_state['targets']:
666 if target.target in image_name:
667 self.orm_wrapper.update_target_object(target, path)
668
669
670 def store_started_task(self, event):
671 assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
672 assert 'taskfile' in vars(event)
673 localfilepath = event.taskfile.split(":")[-1]
674 assert localfilepath.startswith("/")
675
676 identifier = event.taskfile + ":" + event.taskname
677
678 recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
679 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
680
681 task_information = self._get_task_information(event, recipe)
682 task_information['outcome'] = Task.OUTCOME_NA
683
684 if isinstance(event, bb.runqueue.runQueueTaskSkipped):
685 assert 'reason' in vars(event)
686 task_information['task_executed'] = False
687 if event.reason == "covered":
688 task_information['outcome'] = Task.OUTCOME_COVERED
689 if event.reason == "existing":
690 task_information['outcome'] = Task.OUTCOME_PREBUILT
691 else:
692 task_information['task_executed'] = True
693 if 'noexec' in vars(event) and event.noexec == True:
694 task_information['task_executed'] = False
695 task_information['outcome'] = Task.OUTCOME_EMPTY
696 task_information['script_type'] = Task.CODING_NA
697
698 # do not assign order numbers to scene tasks
699 if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
700 self.task_order += 1
701 task_information['order'] = self.task_order
702
703 task_obj = self.orm_wrapper.get_update_task_object(task_information)
704
705 self.internal_state['taskdata'][identifier] = {
706 'outcome': task_information['outcome'],
707 }
708
709
710 def store_tasks_stats(self, event):
711 for (taskfile, taskname, taskstats, recipename) in event._localdata:
712 localfilepath = taskfile.split(":")[-1]
713 assert localfilepath.startswith("/")
714
715 recipe_information = self._get_recipe_information_from_taskfile(taskfile)
716 recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
717 file_path__endswith = recipe_information['file_path'],
718 name = recipename)
719
720 task_information = {}
721 task_information['build'] = self.internal_state['build']
722 task_information['recipe'] = recipe_object
723 task_information['task_name'] = taskname
724 task_information['cpu_usage'] = taskstats['cpu_usage']
725 task_information['disk_io'] = taskstats['disk_io']
726 task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist
727
728 def update_and_store_task(self, event):
729 assert 'taskfile' in vars(event)
730 localfilepath = event.taskfile.split(":")[-1]
731 assert localfilepath.startswith("/")
732
733 identifier = event.taskfile + ":" + event.taskname
734 if not identifier in self.internal_state['taskdata']:
735 if isinstance(event, bb.build.TaskBase):
736 # we do a bit of guessing
737 candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
738 if len(candidates) == 1:
739 identifier = candidates[0]
740
741 assert identifier in self.internal_state['taskdata']
742 identifierlist = identifier.split(":")
743 realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
744 recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
745 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
746 task_information = self._get_task_information(event,recipe)
747
748 if 'time' in vars(event):
749 if not 'start_time' in self.internal_state['taskdata'][identifier]:
750 self.internal_state['taskdata'][identifier]['start_time'] = event.time
751 else:
752 task_information['end_time'] = event.time
753 task_information['start_time'] = self.internal_state['taskdata'][identifier]['start_time']
754
755 task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']
756
757 if 'logfile' in vars(event):
758 task_information['logfile'] = event.logfile
759
760 if '_message' in vars(event):
761 task_information['message'] = event._message
762
763 if 'taskflags' in vars(event):
764 # with TaskStarted, we get even more information
765 if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
766 task_information['script_type'] = Task.CODING_PYTHON
767 else:
768 task_information['script_type'] = Task.CODING_SHELL
769
770 if task_information['outcome'] == Task.OUTCOME_NA:
771 if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
772 task_information['outcome'] = Task.OUTCOME_SUCCESS
773 del self.internal_state['taskdata'][identifier]
774
775 if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
776 task_information['outcome'] = Task.OUTCOME_FAILED
777 del self.internal_state['taskdata'][identifier]
778
779 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
780
781
782 def store_missed_state_tasks(self, event):
783 for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']:
784
785 identifier = fn + taskname + "_setscene"
786 recipe_information = self._get_recipe_information_from_taskfile(fn)
787 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
788 mevent = MockEvent()
789 mevent.taskname = taskname
790 mevent.taskhash = taskhash
791 task_information = self._get_task_information(mevent,recipe)
792
793 task_information['start_time'] = datetime.datetime.now()
794 task_information['outcome'] = Task.OUTCOME_NA
795 task_information['sstate_checksum'] = taskhash
796 task_information['sstate_result'] = Task.SSTATE_MISS
797 task_information['path_to_sstate_obj'] = sstatefile
798
799 self.orm_wrapper.get_update_task_object(task_information)
800
801 for (fn, taskname, taskhash, sstatefile) in event._localdata['found']:
802
803 identifier = fn + taskname + "_setscene"
804 recipe_information = self._get_recipe_information_from_taskfile(fn)
805 recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
806 mevent = MockEvent()
807 mevent.taskname = taskname
808 mevent.taskhash = taskhash
809 task_information = self._get_task_information(mevent,recipe)
810
811 task_information['path_to_sstate_obj'] = sstatefile
812
813 self.orm_wrapper.get_update_task_object(task_information)
814
815
816 def store_target_package_data(self, event):
817 assert '_localdata' in vars(event)
818 # for all image targets
819 for target in self.internal_state['targets']:
820 if target.is_image:
821 try:
822 pkgdata = event._localdata['pkgdata']
823 imgdata = event._localdata['imgdata'][target.target]
824 self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
825 filedata = event._localdata['filedata'][target.target]
826 self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
827 except KeyError:
828 # we must not have got the data for this image; nothing to save
829 pass
830
831
832
833 def store_dependency_information(self, event):
834 assert '_depgraph' in vars(event)
835 assert 'layer-priorities' in event._depgraph
836 assert 'pn' in event._depgraph
837 assert 'tdepends' in event._depgraph
838
839 errormsg = ""
840
841 # save layer version priorities
842 if 'layer-priorities' in event._depgraph.keys():
843 for lv in event._depgraph['layer-priorities']:
844 (name, path, regexp, priority) = lv
845 layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
846 assert layer_version_obj is not None
847 layer_version_obj.priority = priority
848 layer_version_obj.save()
849
850 # save recipe information
851 self.internal_state['recipes'] = {}
852 for pn in event._depgraph['pn']:
853
854 file_name = event._depgraph['pn'][pn]['filename']
855 layer_version_obj = self._get_layer_version_for_path(file_name.split(":")[-1])
856
857 assert layer_version_obj is not None
858
859 recipe_info = {}
860 recipe_info['name'] = pn
861 recipe_info['layer_version'] = layer_version_obj
862
863 if 'version' in event._depgraph['pn'][pn]:
864 recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
865
866 if 'summary' in event._depgraph['pn'][pn]:
867 recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
868
869 if 'license' in event._depgraph['pn'][pn]:
870 recipe_info['license'] = event._depgraph['pn'][pn]['license']
871
872 if 'description' in event._depgraph['pn'][pn]:
873 recipe_info['description'] = event._depgraph['pn'][pn]['description']
874
875 if 'section' in event._depgraph['pn'][pn]:
876 recipe_info['section'] = event._depgraph['pn'][pn]['section']
877
878 if 'homepage' in event._depgraph['pn'][pn]:
879 recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
880
881 if 'bugtracker' in event._depgraph['pn'][pn]:
882 recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
883
884 recipe_info['file_path'] = file_name
885 recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
886 recipe.is_image = False
887 if 'inherits' in event._depgraph['pn'][pn].keys():
888 for cls in event._depgraph['pn'][pn]['inherits']:
889 if cls.endswith('/image.bbclass'):
890 recipe.is_image = True
891 break
892 if recipe.is_image:
893 for t in self.internal_state['targets']:
894 if pn == t.target:
895 t.is_image = True
896 t.save()
897 self.internal_state['recipes'][pn] = recipe
898
899 # we will not get recipes for keys whose values are listed in ASSUME_PROVIDED
900
901 assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()
902
903 # save recipe dependency
904 # buildtime
905 for recipe in event._depgraph['depends']:
906 try:
907 target = self.internal_state['recipes'][recipe]
908 for dep in event._depgraph['depends'][recipe]:
909 dependency = self.internal_state['recipes'][dep]
910 Recipe_Dependency.objects.get_or_create( recipe = target,
911 depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
912 except KeyError as e:
913 if e not in assume_provided and not str(e).startswith("virtual/"):
914 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
915
916 # save all task information
917 def _save_a_task(taskdesc):
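            # taskdesc strings look like "<pn>.<taskname>", e.g. (illustrative)
            # "busybox.do_compile"; everything up to the last dot is the recipe name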
918 spec = re.split(r'\.', taskdesc);
919 pn = ".".join(spec[0:-1])
920 taskname = spec[-1]
921 e = event
922 e.taskname = pn
923 recipe = self.internal_state['recipes'][pn]
924 task_info = self._get_task_information(e, recipe)
925 task_info['task_name'] = taskname
926 task_obj = self.orm_wrapper.get_update_task_object(task_info)
927 return task_obj
928
929 # create tasks
930 tasks = {}
931 for taskdesc in event._depgraph['tdepends']:
932 tasks[taskdesc] = _save_a_task(taskdesc)
933
934 # create dependencies between tasks
935 for taskdesc in event._depgraph['tdepends']:
936 target = tasks[taskdesc]
937 for taskdep in event._depgraph['tdepends'][taskdesc]:
938 if taskdep not in tasks:
939 # Task info was not collected previously; fetch and save it now
940 dep = _save_a_task(taskdep)
941 else:
942 dep = tasks[taskdep]
943 Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
944
945 if (len(errormsg) > 0):
946 raise Exception(errormsg)
947
948
949 def store_build_package_information(self, event):
950 assert '_localdata' in vars(event)
951 package_info = event._localdata
952 self.orm_wrapper.save_build_package_information(self.internal_state['build'],
953 package_info,
954 self.internal_state['recipes'],
955 )
956
957 def _store_build_done(self):
958 br_id, be_id = self.brbe.split(":")
959 from bldcontrol.models import BuildEnvironment, BuildRequest
960 be = BuildEnvironment.objects.get(pk = be_id)
961 be.lock = BuildEnvironment.LOCK_LOCK
962 be.save()
963 br = BuildRequest.objects.get(pk = br_id)
964 br.state = BuildRequest.REQ_COMPLETED
965 br.build = self.internal_state['build']
966 br.save()
967
968
969 def store_log_error(self, text):
970 mockevent = MockEvent()
971 mockevent.levelno = format.ERROR
972 mockevent.msg = text
973 self.store_log_event(mockevent)
974
975 def store_log_event(self, event):
976 if event.levelno < format.WARNING:
977 return
978
979 if 'args' in vars(event):
980 event.msg = event.msg % event.args
981
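        # events that arrive before the build object exists are buffered in a
        # "backlog" list and replayed once the build has been created (see below)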
982 if not 'build' in self.internal_state:
983 if self.brbe is None:
984 if not 'backlog' in self.internal_state:
985 self.internal_state['backlog'] = []
986 self.internal_state['backlog'].append(event)
987 else: # we're under Toaster control, post the errors to the build request
988 from bldcontrol.models import BuildRequest, BRError
989 br, be = self.brbe.split(":")
990 buildrequest = BuildRequest.objects.get(pk = br)
991 brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
992
993 return
994
995 if 'build' in self.internal_state and 'backlog' in self.internal_state:
996 if len(self.internal_state['backlog']):
997 tempevent = self.internal_state['backlog'].pop()
998 print " Saving stored event ", tempevent
999 self.store_log_event(tempevent)
1000 else:
1001 del self.internal_state['backlog']
1002
1003 log_information = {}
1004 log_information['build'] = self.internal_state['build']
1005 if event.levelno == format.ERROR:
1006 log_information['level'] = LogMessage.ERROR
1007 elif event.levelno == format.WARNING:
1008 log_information['level'] = LogMessage.WARNING
1009 else:
1010 log_information['level'] = LogMessage.INFO
1011
1012 log_information['message'] = event.msg
1013 log_information['pathname'] = event.pathname
1014 log_information['lineno'] = event.lineno
1015 self.orm_wrapper.create_logmessage(log_information)
1016
1017 def close(self):
1018 if self.brbe is not None:
1019 self._store_build_done()
1020
1021 if 'backlog' in self.internal_state:
1022 for event in self.internal_state['backlog']:
1023 logger.error("Unsaved log: %s", event.msg)
diff --git a/bitbake/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
new file mode 100644
index 0000000000..b7cbe1a4f3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/__init__.py
@@ -0,0 +1,17 @@
1#
2# Gtk+ UI pieces for BitBake
3#
4# Copyright (C) 2006-2007 Richard Purdie
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/builddetailspage.py b/bitbake/lib/bb/ui/crumbs/builddetailspage.py
new file mode 100755
index 0000000000..7fc690e2fa
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/builddetailspage.py
@@ -0,0 +1,437 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import pango
25import gobject
26 import bb.process
import os
27from bb.ui.crumbs.progressbar import HobProgressBar
28from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton, HobInfoButton
29from bb.ui.crumbs.runningbuild import RunningBuildTreeView
30from bb.ui.crumbs.runningbuild import BuildFailureTreeView
31from bb.ui.crumbs.hobpages import HobPage
32from bb.ui.crumbs.hobcolor import HobColors
33
34class BuildConfigurationTreeView(gtk.TreeView):
35 def __init__ (self):
36 gtk.TreeView.__init__(self)
37 self.set_rules_hint(False)
38 self.set_headers_visible(False)
39 self.set_property("hover-expand", True)
40 self.get_selection().set_mode(gtk.SELECTION_SINGLE)
41
42 # The first column: the configuration variable's name.
43 renderer0 = gtk.CellRendererText()
44 renderer0.set_property('font-desc', pango.FontDescription('courier bold 12'))
45 col0 = gtk.TreeViewColumn ("Name", renderer0, text=0)
46 self.append_column (col0)
47
48 # The second column: the configuration variable's value(s).
49 renderer1 = HobWarpCellRendererText(col_number=1)
50 col1 = gtk.TreeViewColumn ("Values", renderer1, text=1)
51 self.append_column (col1)
52
53 def set_vars(self, key="", var=[""]):
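# Wrap a key/value pair into a dict for the tree model: a string value becomes a single-element list, a multi-element list is kept as-is, and anything else (including a one-element list) yields an empty dict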
54 d = {}
55 if type(var) == str:
56 d = {key: [var]}
57 elif type(var) == list and len(var) > 1:
58 #create the sub item line
59 l = []
60 text = ""
61 for item in var:
62 text = " - " + item
63 l.append(text)
64 d = {key: var}
65
66 return d
67
68 def set_config_model(self, show_vars):
69 listmodel = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
70 parent = None
71 for var in show_vars:
72 for subitem in var.items():
73 name = subitem[0]
74 is_parent = True
75 for value in subitem[1]:
76 if is_parent:
77 parent = listmodel.append(parent, (name, value))
78 is_parent = False
79 else:
80 listmodel.append(parent, (None, value))
81 name = " - "
82 parent = None
83 # renew the tree model after getting the configuration messages
84 self.set_model(listmodel)
85
86 def show(self, src_config_info, src_params):
87 vars = []
88 vars.append(self.set_vars("BB version:", src_params.bb_version))
89 vars.append(self.set_vars("Target arch:", src_params.target_arch))
90 vars.append(self.set_vars("Target OS:", src_params.target_os))
91 vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
92 vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
93 vars.append(self.set_vars("Distro version:", src_params.distro_version))
94 vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
95 vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
96 vars.append(self.set_vars("Layers:", src_config_info.layers))
97
98 for path in src_config_info.layers:
99 import os, os.path
100 if os.path.exists(path):
101 branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
102 if branch.startswith("fatal:"):
103 branch = "(unknown)"
104 if branch:
105 branch = branch.strip('\n')
106 vars.append(self.set_vars("Branch:", branch))
107 break
108
109 self.set_config_model(vars)
110
111 def reset(self):
112 self.set_model(None)
113
114#
115# BuildDetailsPage
116#
117
118class BuildDetailsPage (HobPage):
119
120 def __init__(self, builder):
121 super(BuildDetailsPage, self).__init__(builder, "Building ...")
122
123 self.num_of_issues = 0
124 self.endpath = (0,)
125 # create visual elements
126 self.create_visual_elements()
127
128 def create_visual_elements(self):
129 # create visual elements
130 self.vbox = gtk.VBox(False, 12)
131
132 self.progress_box = gtk.VBox(False, 12)
133 self.task_status = gtk.Label("\n") # to ensure layout is correct
134 self.task_status.set_alignment(0.0, 0.5)
135 self.progress_box.pack_start(self.task_status, expand=False, fill=False)
136 self.progress_hbox = gtk.HBox(False, 6)
137 self.progress_box.pack_end(self.progress_hbox, expand=True, fill=True)
138 self.progress_bar = HobProgressBar()
139 self.progress_hbox.pack_start(self.progress_bar, expand=True, fill=True)
140 self.stop_button = HobAltButton("Stop")
141 self.stop_button.connect("clicked", self.stop_button_clicked_cb)
142 self.stop_button.set_sensitive(False)
143 self.progress_hbox.pack_end(self.stop_button, expand=False, fill=False)
144
145 self.notebook = HobNotebook()
146 self.config_tv = BuildConfigurationTreeView()
147 self.scrolled_view_config = gtk.ScrolledWindow ()
148 self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
149 self.scrolled_view_config.add(self.config_tv)
150 self.notebook.append_page(self.scrolled_view_config, "Build configuration")
151
152 self.failure_tv = BuildFailureTreeView()
153 self.failure_model = self.builder.handler.build.model.failure_model()
154 self.failure_tv.set_model(self.failure_model)
155 self.scrolled_view_failure = gtk.ScrolledWindow ()
156 self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
157 self.scrolled_view_failure.add(self.failure_tv)
158 self.notebook.append_page(self.scrolled_view_failure, "Issues")
159
160 self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
161 self.build_tv.set_model(self.builder.handler.build.model)
162 self.scrolled_view_build = gtk.ScrolledWindow ()
163 self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
164 self.scrolled_view_build.add(self.build_tv)
165 self.notebook.append_page(self.scrolled_view_build, "Log")
166
167 self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)
168
169 self.button_box = gtk.HBox(False, 6)
170 self.back_button = HobAltButton('&lt;&lt; Back')
171 self.back_button.connect("clicked", self.back_button_clicked_cb)
172 self.button_box.pack_start(self.back_button, expand=False, fill=False)
173
174 def update_build_status(self, current, total, task):
175 recipe_path, recipe_task = task.split(", ")
176 recipe = os.path.splitext(os.path.basename(recipe_path))[0]
177 tsk_msg = "<b>Running task %s of %s:</b> %s\n<b>Recipe:</b> %s" % (current, total, recipe_task, recipe)
178 self.task_status.set_markup(tsk_msg)
179 self.stop_button.set_sensitive(True)
180
181 def reset_build_status(self):
182 self.task_status.set_markup("\n") # to ensure layout is correct
183 self.endpath = (0,)
184
185 def show_issues(self):
186 self.num_of_issues += 1
187 self.notebook.show_indicator_icon("Issues", self.num_of_issues)
188 self.notebook.queue_draw()
189
190 def reset_issues(self):
191 self.num_of_issues = 0
192 self.notebook.hide_indicator_icon("Issues")
193
194 def _remove_all_widget(self):
195 children = self.vbox.get_children() or []
196 for child in children:
197 self.vbox.remove(child)
198 children = self.box_group_area.get_children() or []
199 for child in children:
200 self.box_group_area.remove(child)
201 children = self.get_children() or []
202 for child in children:
203 self.remove(child)
204
205 def add_build_fail_top_bar(self, actions, log_file=None):
206 primary_action = "Edit %s" % actions
207
208 color = HobColors.ERROR
209 build_fail_top = gtk.EventBox()
210 #build_fail_top.set_size_request(-1, 200)
211 build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
212
213 build_fail_tab = gtk.Table(14, 46, True)
214 build_fail_top.add(build_fail_tab)
215
216 icon = gtk.Image()
217 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
218 icon.set_from_pixbuf(icon_pix_buffer)
219 build_fail_tab.attach(icon, 1, 4, 0, 6)
220
221 label = gtk.Label()
222 label.set_alignment(0.0, 0.5)
223 label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
224 build_fail_tab.attach(label, 4, 26, 0, 6)
225
226 label = gtk.Label()
227 label.set_alignment(0.0, 0.5)
228 # Ensure variable disk_full is defined
229 if not hasattr(self.builder, 'disk_full'):
230 self.builder.disk_full = False
231
232 if self.builder.disk_full:
233 markup = "<span size='medium'>There is no disk space left, so Hob cannot finish building your image. Free up some disk space\n"
234 markup += "and restart the build. Check the \"Issues\" tab for more details</span>"
235 label.set_markup(markup)
236 else:
237 label.set_markup("<span size='medium'>Check the \"Issues\" information for more details</span>")
238 build_fail_tab.attach(label, 4, 40, 4, 9)
239
240 # create button 'Edit packages'
241 action_button = HobButton(primary_action)
242 #action_button.set_size_request(-1, 40)
243 action_button.set_tooltip_text("Edit the %s parameters" % actions)
244 action_button.connect('clicked', self.failure_primary_action_button_clicked_cb, primary_action)
245
246 if log_file:
247 open_log_button = HobAltButton("Open log")
248 open_log_button.set_relief(gtk.RELIEF_HALF)
249 open_log_button.set_tooltip_text("Open the build's log file")
250 open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
251
252 attach_pos = (24 if log_file else 14)
253 file_bug_button = HobAltButton('File a bug')
254 file_bug_button.set_relief(gtk.RELIEF_HALF)
255 file_bug_button.set_tooltip_text("Open the Yocto Project bug tracking website")
256 file_bug_button.connect('clicked', self.failure_activate_file_bug_link_cb)
257
258 if not self.builder.disk_full:
259 build_fail_tab.attach(action_button, 4, 13, 9, 12)
260 if log_file:
261 build_fail_tab.attach(open_log_button, 14, 23, 9, 12)
262 build_fail_tab.attach(file_bug_button, attach_pos, attach_pos + 9, 9, 12)
263
264 else:
265 restart_build = HobButton("Restart the build")
266 restart_build.set_tooltip_text("Restart the build")
267 restart_build.connect('clicked', self.restart_build_button_clicked_cb)
268
269 build_fail_tab.attach(restart_build, 4, 13, 9, 12)
270 build_fail_tab.attach(action_button, 14, 23, 9, 12)
271 if log_file:
272 build_fail_tab.attach(open_log_button, attach_pos, attach_pos + 9, 9, 12)
273
274 self.builder.disk_full = False
275 return build_fail_top
276
277 def show_fail_page(self, title):
278 self._remove_all_widget()
279 self.title = "Hob cannot build your %s" % title
280
281 self.build_fail_bar = self.add_build_fail_top_bar(title, self.builder.current_logfile)
282
283 self.pack_start(self.group_align, expand=True, fill=True)
284 self.box_group_area.pack_start(self.build_fail_bar, expand=False, fill=False)
285 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
286
287 self.vbox.pack_start(self.notebook, expand=True, fill=True)
288 self.show_all()
289 self.notebook.set_page("Issues")
290 self.back_button.hide()
291
292 def add_build_stop_top_bar(self, action, log_file=None):
293 color = HobColors.LIGHT_GRAY
294 build_stop_top = gtk.EventBox()
295 #build_stop_top.set_size_request(-1, 200)
296 build_stop_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
297 build_stop_top.set_flags(gtk.CAN_DEFAULT)
298 build_stop_top.grab_default()
299
300 build_stop_tab = gtk.Table(11, 46, True)
301 build_stop_top.add(build_stop_tab)
302
303 icon = gtk.Image()
304 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INFO_HOVER_FILE)
305 icon.set_from_pixbuf(icon_pix_buffer)
306 build_stop_tab.attach(icon, 1, 4, 0, 6)
307
308 label = gtk.Label()
309 label.set_alignment(0.0, 0.5)
310 label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
311 build_stop_tab.attach(label, 4, 26, 0, 6)
312
313 action_button = HobButton("Edit %s" % action)
314 action_button.set_size_request(-1, 40)
315 if action == "image":
316 action_button.set_tooltip_text("Edit the image parameters")
317 elif action == "recipes":
318 action_button.set_tooltip_text("Edit the included recipes")
319 elif action == "packages":
320 action_button.set_tooltip_text("Edit the included packages")
321 action_button.connect('clicked', self.stop_primary_action_button_clicked_cb, action)
322 build_stop_tab.attach(action_button, 4, 13, 6, 9)
323
324 if log_file:
325 open_log_button = HobAltButton("Open log")
326 open_log_button.set_relief(gtk.RELIEF_HALF)
327 open_log_button.set_tooltip_text("Open the build's log file")
328 open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
329 build_stop_tab.attach(open_log_button, 14, 23, 6, 9)
330
331 attach_pos = (24 if log_file else 14)
332 build_button = HobAltButton("Build new image")
333 #build_button.set_size_request(-1, 40)
334 build_button.set_tooltip_text("Create a new image from scratch")
335 build_button.connect('clicked', self.new_image_button_clicked_cb)
336 build_stop_tab.attach(build_button, attach_pos, attach_pos + 9, 6, 9)
337
338 return build_stop_top, action_button
339
340 def show_stop_page(self, action):
341 self._remove_all_widget()
342 self.title = "Build stopped"
343 self.build_stop_bar, action_button = self.add_build_stop_top_bar(action, self.builder.current_logfile)
344
345 self.pack_start(self.group_align, expand=True, fill=True)
346 self.box_group_area.pack_start(self.build_stop_bar, expand=False, fill=False)
347 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
348
349 self.vbox.pack_start(self.notebook, expand=True, fill=True)
350 self.show_all()
351 self.back_button.hide()
352 return action_button
353
354 def show_page(self, step):
355 self._remove_all_widget()
356 if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
357 self.title = "Building packages ..."
358 else:
359 self.title = "Building image ..."
360 self.build_details_top = self.add_onto_top_bar(None)
361 self.pack_start(self.build_details_top, expand=False, fill=False)
362 self.pack_start(self.group_align, expand=True, fill=True)
363
364 self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
365
366 self.progress_bar.reset()
367 self.config_tv.reset()
368 self.vbox.pack_start(self.progress_box, expand=False, fill=False)
369
370 self.vbox.pack_start(self.notebook, expand=True, fill=True)
371
372 self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
373 self.show_all()
374 self.notebook.set_page("Log")
375 self.back_button.hide()
376
377 self.reset_build_status()
378 self.reset_issues()
379
380 def update_progress_bar(self, title, fraction, status=None):
381 self.progress_bar.update(fraction)
382 self.progress_bar.set_title(title)
383 self.progress_bar.set_rcstyle(status)
384
385 def back_button_clicked_cb(self, button):
386 self.builder.show_configuration()
387
388 def new_image_button_clicked_cb(self, button):
389 self.builder.reset()
390
391 def show_back_button(self):
392 self.back_button.show()
393
394 def stop_button_clicked_cb(self, button):
395 self.builder.stop_build()
396
397 def hide_stop_button(self):
398 self.stop_button.set_sensitive(False)
399 self.stop_button.hide()
400
401 def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
402 if treeview and v_adj:
403 if path[0] > self.endpath[0]: # check whether the event appended a new row
404 self.endpath = path
405 # check whether the gtk.Adjustment position is at the end boundary
406 if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
407 treeview.scroll_to_cell(path)
408
409 def show_configurations(self, configurations, params):
410 self.config_tv.show(configurations, params)
411
412 def failure_primary_action_button_clicked_cb(self, button, action):
413 if "Edit recipes" in action:
414 self.builder.show_recipes()
415 elif "Edit packages" in action:
416 self.builder.show_packages()
417 elif "Edit image" in action:
418 self.builder.show_configuration()
419
420 def restart_build_button_clicked_cb(self, button):
421 self.builder.just_bake()
422
423 def stop_primary_action_button_clicked_cb(self, button, action):
424 if "recipes" in action:
425 self.builder.show_recipes()
426 elif "packages" in action:
427 self.builder.show_packages()
428 elif "image" in action:
429 self.builder.show_configuration()
430
431 def open_log_button_clicked_cb(self, button, log_file):
432 if log_file:
433 log_file = "file:///" + log_file
434 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
435
436 def failure_activate_file_bug_link_cb(self, button):
437 button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
diff --git a/bitbake/lib/bb/ui/crumbs/builder.py b/bitbake/lib/bb/ui/crumbs/builder.py
new file mode 100755
index 0000000000..455af320e8
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/builder.py
@@ -0,0 +1,1475 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2011-2012 Intel Corporation
6#
7# Authored by Joshua Lock <josh@linux.intel.com>
8# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
9# Authored by Shane Wang <shane.wang@intel.com>
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import glib
25import gtk, gobject
26import copy
27import os
28import subprocess
29import shlex
30import re
31import logging
32import sys
33import signal
34import time
35from bb.ui.crumbs.imageconfigurationpage import ImageConfigurationPage
36from bb.ui.crumbs.recipeselectionpage import RecipeSelectionPage
37from bb.ui.crumbs.packageselectionpage import PackageSelectionPage
38from bb.ui.crumbs.builddetailspage import BuildDetailsPage
39from bb.ui.crumbs.imagedetailspage import ImageDetailsPage
40from bb.ui.crumbs.sanitycheckpage import SanityCheckPage
41from bb.ui.crumbs.hobwidget import hwc, HobButton, HobAltButton
42from bb.ui.crumbs.persistenttooltip import PersistentTooltip
43import bb.ui.crumbs.utils
44from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
45from bb.ui.crumbs.hig.simplesettingsdialog import SimpleSettingsDialog
46from bb.ui.crumbs.hig.advancedsettingsdialog import AdvancedSettingsDialog
47from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
48from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
49from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog
50from bb.ui.crumbs.hig.parsingwarningsdialog import ParsingWarningsDialog
51from bb.ui.crumbs.hig.propertydialog import PropertyDialog
52
53hobVer = 20120808
54
55class Configuration:
56 '''Represents the data structure of configuration.'''
57
58 @classmethod
59 def parse_proxy_string(cls, proxy):
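# Parse a proxy URL into its (protocol, user, password, host, port) components; unmatched parts come back as None, or empty strings when nothing matches at all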
60 pattern = "^\s*((http|https|ftp|socks|cvs)://)?((\S+):(\S+)@)?([^\s:]+)(:(\d+))?/?"
61 match = re.search(pattern, proxy)
62 if match:
63 return match.group(2), match.group(4), match.group(5), match.group(6), match.group(8)
64 else:
65 return None, None, None, "", ""
66
67 @classmethod
68 def make_host_string(cls, prot, user, passwd, host, default_prot=""):
69 if host == None or host == "":
70 return ""
71
72 passwd = passwd or ""
73
74 if user != None and user != "":
75 if prot == None or prot == "":
76 prot = default_prot
77 return prot + "://" + user + ":" + passwd + "@" + host
78 else:
79 if prot == None or prot == "":
80 return host
81 else:
82 return prot + "://" + host
83
84 @classmethod
85 def make_port_string(cls, port):
86 port = port or ""
87 return port
88
89 @classmethod
90 def make_proxy_string(cls, prot, user, passwd, host, port, default_prot=""):
91 if host == None or host == "":# or port == None or port == "":
92 return ""
93
94 return Configuration.make_host_string(prot, user, passwd, host, default_prot) + (":" + Configuration.make_port_string(port) if port else "")
95
96 def __init__(self):
97 self.curr_mach = ""
98 self.selected_image = None
99 # settings
100 self.curr_distro = ""
101 self.dldir = self.sstatedir = self.sstatemirror = ""
102 self.pmake = self.bbthread = 0
103 self.curr_package_format = ""
104 self.image_rootfs_size = self.image_extra_size = 0
105 self.image_overhead_factor = 1
106 self.incompat_license = ""
107 self.curr_sdk_machine = ""
108 self.conf_version = self.lconf_version = ""
109 self.extra_setting = {}
110 self.toolchain_build = False
111 self.image_fstypes = ""
112 self.image_size = None
113 self.image_packages = []
114 # bblayers.conf
115 self.layers = []
116 # image/recipes/packages
117 self.clear_selection()
118
119 self.user_selected_packages = []
120
121 self.default_task = "build"
122
123 # proxy settings
124 self.enable_proxy = None
125 self.same_proxy = False
126 self.proxies = {
127 "http" : [None, None, None, "", ""], # protocol : [prot, user, passwd, host, port]
128 "https" : [None, None, None, "", ""],
129 "ftp" : [None, None, None, "", ""],
130 "socks" : [None, None, None, "", ""],
131 "cvs" : [None, None, None, "", ""],
132 }
133
134 def clear_selection(self):
135 self.selected_recipes = []
136 self.selected_packages = []
137 self.initial_selected_image = None
138 self.initial_selected_packages = []
139 self.initial_user_selected_packages = []
140
141 def split_proxy(self, protocol, proxy):
142 entry = []
143 prot, user, passwd, host, port = Configuration.parse_proxy_string(proxy)
144 entry.append(prot)
145 entry.append(user)
146 entry.append(passwd)
147 entry.append(host)
148 entry.append(port)
149 self.proxies[protocol] = entry
150
151 def combine_proxy(self, protocol):
152 entry = self.proxies[protocol]
153 return Configuration.make_proxy_string(entry[0], entry[1], entry[2], entry[3], entry[4], protocol)
154
155 def combine_host_only(self, protocol):
156 entry = self.proxies[protocol]
157 return Configuration.make_host_string(entry[0], entry[1], entry[2], entry[3], protocol)
158
159 def combine_port_only(self, protocol):
160 entry = self.proxies[protocol]
161 return Configuration.make_port_string(entry[4])
162
163 def update(self, params):
164 # settings
165 self.curr_distro = params["distro"]
166 self.dldir = params["dldir"]
167 self.sstatedir = params["sstatedir"]
168 self.sstatemirror = params["sstatemirror"]
169 self.pmake = int(params["pmake"].split()[1])
170 self.bbthread = params["bbthread"]
171 self.curr_package_format = " ".join(params["pclass"].split("package_")).strip()
172 self.image_rootfs_size = params["image_rootfs_size"]
173 self.image_extra_size = params["image_extra_size"]
174 self.image_overhead_factor = params['image_overhead_factor']
175 self.incompat_license = params["incompat_license"]
176 self.curr_sdk_machine = params["sdk_machine"]
177 self.conf_version = params["conf_version"]
178 self.lconf_version = params["lconf_version"]
179 self.image_fstypes = params["image_fstypes"]
180 # self.extra_setting/self.toolchain_build
181 # bblayers.conf
182 self.layers = params["layer"].split()
183 self.layers_non_removable = params["layers_non_removable"].split()
184 self.default_task = params["default_task"]
185
186 # proxy settings
187 self.enable_proxy = params["http_proxy"] != "" or params["https_proxy"] != "" \
188 or params["ftp_proxy"] != "" or params["socks_proxy"] != "" \
189 or params["cvs_proxy_host"] != "" or params["cvs_proxy_port"] != ""
190 self.split_proxy("http", params["http_proxy"])
191 self.split_proxy("https", params["https_proxy"])
192 self.split_proxy("ftp", params["ftp_proxy"])
193 self.split_proxy("socks", params["socks_proxy"])
194 self.split_proxy("cvs", params["cvs_proxy_host"] + ":" + params["cvs_proxy_port"])
195
196 def save(self, handler, defaults=False):
197 # bblayers.conf
198 handler.set_var_in_file("BBLAYERS", self.layers, "bblayers.conf")
199 # local.conf
200 if not defaults:
201 handler.early_assign_var_in_file("MACHINE", self.curr_mach, "local.conf")
202 handler.set_var_in_file("DISTRO", self.curr_distro, "local.conf")
203 handler.set_var_in_file("DL_DIR", self.dldir, "local.conf")
204 handler.set_var_in_file("SSTATE_DIR", self.sstatedir, "local.conf")
205 sstate_mirror_list = self.sstatemirror.split("\\n ")
206 sstate_mirror_list_modified = []
207 for mirror in sstate_mirror_list:
208 if mirror != "":
209 mirror = mirror + "\\n"
210 sstate_mirror_list_modified.append(mirror)
211 handler.set_var_in_file("SSTATE_MIRRORS", sstate_mirror_list_modified, "local.conf")
212 handler.set_var_in_file("PARALLEL_MAKE", "-j %s" % self.pmake, "local.conf")
213 handler.set_var_in_file("BB_NUMBER_THREADS", self.bbthread, "local.conf")
214 handler.set_var_in_file("PACKAGE_CLASSES", " ".join(["package_" + i for i in self.curr_package_format.split()]), "local.conf")
215 handler.set_var_in_file("IMAGE_ROOTFS_SIZE", self.image_rootfs_size, "local.conf")
216 handler.set_var_in_file("IMAGE_EXTRA_SPACE", self.image_extra_size, "local.conf")
217 handler.set_var_in_file("INCOMPATIBLE_LICENSE", self.incompat_license, "local.conf")
218 handler.set_var_in_file("SDKMACHINE", self.curr_sdk_machine, "local.conf")
219 handler.set_var_in_file("CONF_VERSION", self.conf_version, "local.conf")
220 handler.set_var_in_file("LCONF_VERSION", self.lconf_version, "bblayers.conf")
221 handler.set_extra_config(self.extra_setting)
222 handler.set_var_in_file("TOOLCHAIN_BUILD", self.toolchain_build, "local.conf")
223 handler.set_var_in_file("IMAGE_FSTYPES", self.image_fstypes, "local.conf")
224 if not defaults:
225 # image/recipes/packages
226 handler.set_var_in_file("__SELECTED_IMAGE__", self.selected_image, "local.conf")
227 handler.set_var_in_file("DEPENDS", self.selected_recipes, "local.conf")
228 handler.set_var_in_file("IMAGE_INSTALL", self.user_selected_packages, "local.conf")
229 # proxy
230 if self.enable_proxy == True:
231 handler.set_var_in_file("http_proxy", self.combine_proxy("http"), "local.conf")
232 handler.set_var_in_file("https_proxy", self.combine_proxy("https"), "local.conf")
233 handler.set_var_in_file("ftp_proxy", self.combine_proxy("ftp"), "local.conf")
234 handler.set_var_in_file("all_proxy", self.combine_proxy("socks"), "local.conf")
235 handler.set_var_in_file("CVS_PROXY_HOST", self.combine_host_only("cvs"), "local.conf")
236 handler.set_var_in_file("CVS_PROXY_PORT", self.combine_port_only("cvs"), "local.conf")
237 else:
238 handler.set_var_in_file("http_proxy", "", "local.conf")
239 handler.set_var_in_file("https_proxy", "", "local.conf")
240 handler.set_var_in_file("ftp_proxy", "", "local.conf")
241 handler.set_var_in_file("all_proxy", "", "local.conf")
242 handler.set_var_in_file("CVS_PROXY_HOST", "", "local.conf")
243 handler.set_var_in_file("CVS_PROXY_PORT", "", "local.conf")
244
245 def __str__(self):
246 s = "VERSION: '%s', BBLAYERS: '%s', MACHINE: '%s', DISTRO: '%s', DL_DIR: '%s'," % \
247 (hobVer, " ".join(self.layers), self.curr_mach, self.curr_distro, self.dldir )
248 s += "SSTATE_DIR: '%s', SSTATE_MIRROR: '%s', PARALLEL_MAKE: '-j %s', BB_NUMBER_THREADS: '%s', PACKAGE_CLASSES: '%s', " % \
249 (self.sstatedir, self.sstatemirror, self.pmake, self.bbthread, " ".join(["package_" + i for i in self.curr_package_format.split()]))
250 s += "IMAGE_ROOTFS_SIZE: '%s', IMAGE_EXTRA_SPACE: '%s', INCOMPATIBLE_LICENSE: '%s', SDKMACHINE: '%s', CONF_VERSION: '%s', " % \
251 (self.image_rootfs_size, self.image_extra_size, self.incompat_license, self.curr_sdk_machine, self.conf_version)
252 s += "LCONF_VERSION: '%s', EXTRA_SETTING: '%s', TOOLCHAIN_BUILD: '%s', IMAGE_FSTYPES: '%s', __SELECTED_IMAGE__: '%s', " % \
253 (self.lconf_version, self.extra_setting, self.toolchain_build, self.image_fstypes, self.selected_image)
254 s += "DEPENDS: '%s', IMAGE_INSTALL: '%s', enable_proxy: '%s', use_same_proxy: '%s', http_proxy: '%s', " % \
255 (self.selected_recipes, self.user_selected_packages, self.enable_proxy, self.same_proxy, self.combine_proxy("http"))
256 s += "https_proxy: '%s', ftp_proxy: '%s', all_proxy: '%s', CVS_PROXY_HOST: '%s', CVS_PROXY_PORT: '%s'" % \
257 (self.combine_proxy("https"), self.combine_proxy("ftp"), self.combine_proxy("socks"),
258 self.combine_host_only("cvs"), self.combine_port_only("cvs"))
259 return s
260
261class Parameters:
262 '''Represents other variables like available machines, etc.'''
263
264 def __init__(self):
265 # Variables
266 self.max_threads = 65535
267 self.core_base = ""
268 self.image_addr = ""
269 self.image_types = []
270 self.runnable_image_types = []
271 self.runnable_machine_patterns = []
272 self.deployable_image_types = []
273 self.tmpdir = ""
274
275 self.all_machines = []
276 self.all_package_formats = []
277 self.all_distros = []
278 self.all_sdk_machines = []
279 self.all_layers = []
280 self.image_names = []
281 self.image_white_pattern = ""
282 self.image_black_pattern = ""
283
284 # for build log to show
285 self.bb_version = ""
286 self.target_arch = ""
287 self.target_os = ""
288 self.distro_version = ""
289 self.tune_pkgarch = ""
290
291 def update(self, params):
292 self.max_threads = params["max_threads"]
293 self.core_base = params["core_base"]
294 self.image_addr = params["image_addr"]
295 self.image_types = params["image_types"].split()
296 self.runnable_image_types = params["runnable_image_types"].split()
297 self.runnable_machine_patterns = params["runnable_machine_patterns"].split()
298 self.deployable_image_types = params["deployable_image_types"].split()
299 self.tmpdir = params["tmpdir"]
300 self.image_white_pattern = params["image_white_pattern"]
301 self.image_black_pattern = params["image_black_pattern"]
302 self.kernel_image_type = params["kernel_image_type"]
303 # for build log to show
304 self.bb_version = params["bb_version"]
305 self.target_arch = params["target_arch"]
306 self.target_os = params["target_os"]
307 self.distro_version = params["distro_version"]
308 self.tune_pkgarch = params["tune_pkgarch"]
309
310def hob_conf_filter(fn, data):
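# Values that Hob saved as *_HOB variables override their plain counterparts when local.conf or bblayers.conf is parsed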
311 if fn.endswith("/local.conf"):
312 distro = data.getVar("DISTRO_HOB")
313 if distro:
314 if distro != "defaultsetup":
315 data.setVar("DISTRO", distro)
316 else:
317 data.delVar("DISTRO")
318
319 keys = ["MACHINE_HOB", "SDKMACHINE_HOB", "PACKAGE_CLASSES_HOB", \
320 "BB_NUMBER_THREADS_HOB", "PARALLEL_MAKE_HOB", "DL_DIR_HOB", \
321 "SSTATE_DIR_HOB", "SSTATE_MIRRORS_HOB", "INCOMPATIBLE_LICENSE_HOB"]
322 for key in keys:
323 var_hob = data.getVar(key)
324 if var_hob:
325 data.setVar(key.split("_HOB")[0], var_hob)
326 return
327
328 if fn.endswith("/bblayers.conf"):
329 layers = data.getVar("BBLAYERS_HOB")
330 if layers:
331 data.setVar("BBLAYERS", layers)
332 return
333
334class Builder(gtk.Window):
335
336 (INITIAL_CHECKS,
337 MACHINE_SELECTION,
338 RCPPKGINFO_POPULATING,
339 RCPPKGINFO_POPULATED,
340 BASEIMG_SELECTED,
341 RECIPE_SELECTION,
342 PACKAGE_GENERATING,
343 PACKAGE_GENERATED,
344 PACKAGE_SELECTION,
345 FAST_IMAGE_GENERATING,
346 IMAGE_GENERATING,
347 IMAGE_GENERATED,
348 MY_IMAGE_OPENED,
349 BACK,
350 END_NOOP) = range(15)
351
352 (SANITY_CHECK,
353 IMAGE_CONFIGURATION,
354 RECIPE_DETAILS,
355 BUILD_DETAILS,
356 PACKAGE_DETAILS,
357 IMAGE_DETAILS,
358 END_TAB) = range(7)
359
360 __step2page__ = {
361 INITIAL_CHECKS : SANITY_CHECK,
362 MACHINE_SELECTION : IMAGE_CONFIGURATION,
363 RCPPKGINFO_POPULATING : IMAGE_CONFIGURATION,
364 RCPPKGINFO_POPULATED : IMAGE_CONFIGURATION,
365 BASEIMG_SELECTED : IMAGE_CONFIGURATION,
366 RECIPE_SELECTION : RECIPE_DETAILS,
367 PACKAGE_GENERATING : BUILD_DETAILS,
368 PACKAGE_GENERATED : PACKAGE_DETAILS,
369 PACKAGE_SELECTION : PACKAGE_DETAILS,
370 FAST_IMAGE_GENERATING : BUILD_DETAILS,
371 IMAGE_GENERATING : BUILD_DETAILS,
372 IMAGE_GENERATED : IMAGE_DETAILS,
373 MY_IMAGE_OPENED : IMAGE_DETAILS,
374 END_NOOP : None,
375 }
376
377 SANITY_CHECK_MIN_DISPLAY_TIME = 5
378
379 def __init__(self, hobHandler, recipe_model, package_model):
380 super(Builder, self).__init__()
381
382 self.hob_image = "hob-image"
383
384 # handler
385 self.handler = hobHandler
386
387 # logger
388 self.logger = logging.getLogger("BitBake")
389 self.consolelog = None
390 self.current_logfile = None
391
392 # configuration and parameters
393 self.configuration = Configuration()
394 self.parameters = Parameters()
395
396 # build step
397 self.current_step = None
398 self.previous_step = None
399
400 self.stopping = False
401
402 # recipe model and package model
403 self.recipe_model = recipe_model
404 self.package_model = package_model
405
406 # Indicate whether user has customized the image
407 self.customized = False
408
409 # Indicate whether the UI is working
410 self.sensitive = True
411
412 # Indicate whether the sanity check ran
413 self.sanity_checked = False
414
415 # save parsing warnings
416 self.parsing_warnings = []
417
418 # create visual elements
419 self.create_visual_elements()
420
421 # connect the signals to functions
422 self.connect("delete-event", self.destroy_window_cb)
423 self.recipe_model.connect ("recipe-selection-changed", self.recipelist_changed_cb)
424 self.package_model.connect("package-selection-changed", self.packagelist_changed_cb)
425 self.handler.connect("config-updated", self.handler_config_updated_cb)
426 self.handler.connect("package-formats-updated", self.handler_package_formats_updated_cb)
427 self.handler.connect("parsing-started", self.handler_parsing_started_cb)
428 self.handler.connect("parsing", self.handler_parsing_cb)
429 self.handler.connect("parsing-completed", self.handler_parsing_completed_cb)
430 self.handler.build.connect("build-started", self.handler_build_started_cb)
431 self.handler.build.connect("build-succeeded", self.handler_build_succeeded_cb)
432 self.handler.build.connect("build-failed", self.handler_build_failed_cb)
433 self.handler.build.connect("build-aborted", self.handler_build_aborted_cb)
434 self.handler.build.connect("task-started", self.handler_task_started_cb)
435 self.handler.build.connect("disk-full", self.handler_disk_full_cb)
436 self.handler.build.connect("log-error", self.handler_build_failure_cb)
437 self.handler.build.connect("log-warning", self.handler_build_failure_cb)
438 self.handler.build.connect("log", self.handler_build_log_cb)
439 self.handler.build.connect("no-provider", self.handler_no_provider_cb)
440 self.handler.connect("generating-data", self.handler_generating_data_cb)
441 self.handler.connect("data-generated", self.handler_data_generated_cb)
442 self.handler.connect("command-succeeded", self.handler_command_succeeded_cb)
443 self.handler.connect("command-failed", self.handler_command_failed_cb)
444 self.handler.connect("parsing-warning", self.handler_parsing_warning_cb)
445 self.handler.connect("sanity-failed", self.handler_sanity_failed_cb)
446 self.handler.connect("recipe-populated", self.handler_recipe_populated_cb)
447 self.handler.connect("package-populated", self.handler_package_populated_cb)
448
449 self.handler.append_to_bbfiles("${TOPDIR}/recipes/images/custom/*.bb")
450 self.handler.append_to_bbfiles("${TOPDIR}/recipes/images/*.bb")
451 self.initiate_new_build_async()
452
453 signal.signal(signal.SIGINT, self.event_handle_SIGINT)
454
455 def create_visual_elements(self):
456 self.set_title("Hob")
457 self.set_icon_name("applications-development")
458 self.set_resizable(True)
459
460 try:
461 window_width = self.get_screen().get_width()
462 window_height = self.get_screen().get_height()
463 except AttributeError:
464 print "Please set the DISPLAY variable before running Hob."
465 sys.exit(1)
466
467 if window_width >= hwc.MAIN_WIN_WIDTH:
468 window_width = hwc.MAIN_WIN_WIDTH
469 window_height = hwc.MAIN_WIN_HEIGHT
470 self.set_size_request(window_width, window_height)
471
472 self.vbox = gtk.VBox(False, 0)
473 self.vbox.set_border_width(0)
474 self.add(self.vbox)
475
476 # create pages
477 self.image_configuration_page = ImageConfigurationPage(self)
478 self.recipe_details_page = RecipeSelectionPage(self)
479 self.build_details_page = BuildDetailsPage(self)
480 self.package_details_page = PackageSelectionPage(self)
481 self.image_details_page = ImageDetailsPage(self)
482 self.sanity_check_page = SanityCheckPage(self)
483 self.display_sanity_check = False
484 self.sanity_check_post_func = False
485 self.had_network_error = False
486
487 self.nb = gtk.Notebook()
488 self.nb.set_show_tabs(False)
489 self.nb.insert_page(self.sanity_check_page, None, self.SANITY_CHECK)
490 self.nb.insert_page(self.image_configuration_page, None, self.IMAGE_CONFIGURATION)
491 self.nb.insert_page(self.recipe_details_page, None, self.RECIPE_DETAILS)
492 self.nb.insert_page(self.build_details_page, None, self.BUILD_DETAILS)
493 self.nb.insert_page(self.package_details_page, None, self.PACKAGE_DETAILS)
494 self.nb.insert_page(self.image_details_page, None, self.IMAGE_DETAILS)
495 self.vbox.pack_start(self.nb, expand=True, fill=True)
496
497 self.show_all()
498 self.nb.set_current_page(0)
499
500 def sanity_check_timeout(self):
501 # The minimum time for showing the 'sanity check' page has passed.
502 # If 'sanity_check_post_func' was set in the meantime, execute it now
503 self.display_sanity_check = False
504 if self.sanity_check_post_func:
505 temp = self.sanity_check_post_func
506 self.sanity_check_post_func = None
507 temp()
508 return False
509
510 def show_sanity_check_page(self):
511 # This window must stay on screen for at least 5 seconds, according to the design document
512 self.nb.set_current_page(self.SANITY_CHECK)
513 self.sanity_check_post_func = None
514 self.display_sanity_check = True
515 self.sanity_check_page.start()
516 gobject.timeout_add(self.SANITY_CHECK_MIN_DISPLAY_TIME * 1000, self.sanity_check_timeout)
517
518 def execute_after_sanity_check(self, func):
519 if not self.display_sanity_check:
520 func()
521 else:
522 self.sanity_check_post_func = func
523
524 def generate_configuration(self):
525 if not self.sanity_checked:
526 self.show_sanity_check_page()
527 self.handler.generate_configuration()
528
529 def initiate_new_build_async(self):
530 self.configuration.selected_image = None
531 self.switch_page(self.MACHINE_SELECTION)
532 self.handler.init_cooker()
533 self.handler.set_extra_inherit("image_types")
534 self.generate_configuration()
535
536 def update_config_async(self):
537 self.set_user_config()
538 self.generate_configuration()
539 self.switch_page(self.MACHINE_SELECTION)
540
541 def sanity_check(self):
542 self.handler.trigger_sanity_check()
543
544 def populate_recipe_package_info_async(self):
545 self.switch_page(self.RCPPKGINFO_POPULATING)
546 # Parse recipes
547 self.set_user_config()
548 self.handler.generate_recipes()
549
550 def generate_packages_async(self, log = False):
551 self.switch_page(self.PACKAGE_GENERATING)
552 if log:
553 self.current_logfile = self.handler.get_logfile()
554 self.do_log(self.current_logfile)
555 # Build packages
556 _, all_recipes = self.recipe_model.get_selected_recipes()
557 self.set_user_config()
558 self.handler.reset_build()
559 self.handler.generate_packages(all_recipes, self.configuration.default_task)
560
561 def restore_initial_selected_packages(self):
562 self.package_model.set_selected_packages(self.configuration.initial_user_selected_packages, True)
563 self.package_model.set_selected_packages(self.configuration.initial_selected_packages)
564 for package in self.configuration.selected_packages:
565 if package not in self.configuration.initial_selected_packages:
566 self.package_model.exclude_item(self.package_model.find_path_for_item(package))
567
568 def fast_generate_image_async(self, log = False):
569 self.switch_page(self.FAST_IMAGE_GENERATING)
570 if log:
571 self.current_logfile = self.handler.get_logfile()
572 self.do_log(self.current_logfile)
573 # Build packages
574 _, all_recipes = self.recipe_model.get_selected_recipes()
575 self.set_user_config()
576 self.handler.reset_build()
577 self.handler.generate_packages(all_recipes, self.configuration.default_task)
578
579 def generate_image_async(self, cont = False):
580 self.switch_page(self.IMAGE_GENERATING)
581 self.handler.reset_build()
582 if not cont:
583 self.current_logfile = self.handler.get_logfile()
584 self.do_log(self.current_logfile)
585 # Build image
586 self.set_user_config()
587 toolchain_packages = []
588 base_image = None
589 if self.configuration.toolchain_build:
590 toolchain_packages = self.package_model.get_selected_packages_toolchain()
591 if self.configuration.selected_image == self.recipe_model.__custom_image__:
592 packages = self.package_model.get_selected_packages()
593 image = self.hob_image
594 base_image = self.configuration.initial_selected_image
595 else:
596 packages = []
597 image = self.configuration.selected_image
598 self.handler.generate_image(image,
599 base_image,
600 packages,
601 toolchain_packages,
602 self.configuration.default_task)
603
604 def generate_new_image(self, image, description):
605 base_image = self.configuration.initial_selected_image
606 if base_image == self.recipe_model.__custom_image__:
607 base_image = None
608 packages = self.package_model.get_selected_packages()
609 self.handler.generate_new_image(image, base_image, packages, description)
610
611 def ensure_dir(self, directory):
612 self.handler.ensure_dir(directory)
613
614 def get_parameters_sync(self):
615 return self.handler.get_parameters()
616
617 def request_package_info_async(self):
618 self.handler.request_package_info()
619
620 def cancel_build_sync(self, force=False):
621 self.handler.cancel_build(force)
622
623 def cancel_parse_sync(self):
624 self.handler.cancel_parse()
625
626 def switch_page(self, next_step):
627 # Main Workflow (Business Logic)
628 self.nb.set_current_page(self.__step2page__[next_step])
629
630 if next_step == self.MACHINE_SELECTION: # init step
631 self.image_configuration_page.show_machine()
632
633 elif next_step == self.RCPPKGINFO_POPULATING:
634 # MACHINE CHANGED action or SETTINGS CHANGED
635 # show the progress bar
636 self.image_configuration_page.show_info_populating()
637
638 elif next_step == self.RCPPKGINFO_POPULATED:
639 self.image_configuration_page.show_info_populated()
640
641 elif next_step == self.BASEIMG_SELECTED:
642 self.image_configuration_page.show_baseimg_selected()
643
644 elif next_step == self.RECIPE_SELECTION:
645 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
646 self.recipe_details_page.set_recipe_curr_tab(self.recipe_details_page.ALL)
647 else:
648 self.recipe_details_page.set_recipe_curr_tab(self.recipe_details_page.INCLUDED)
649
650 elif next_step == self.PACKAGE_SELECTION:
651 self.configuration.initial_selected_packages = self.configuration.selected_packages
652 self.configuration.initial_user_selected_packages = self.configuration.user_selected_packages
653 self.package_details_page.set_title("Edit packages")
654 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
655 self.package_details_page.set_packages_curr_tab(self.package_details_page.ALL)
656 else:
657 self.package_details_page.set_packages_curr_tab(self.package_details_page.INCLUDED)
658 self.package_details_page.show_page(self.current_logfile)
659
660
661 elif next_step == self.PACKAGE_GENERATING or next_step == self.FAST_IMAGE_GENERATING:
662 # both PACKAGE_GENERATING and FAST_IMAGE_GENERATING share the same page
663 self.build_details_page.show_page(next_step)
664
665 elif next_step == self.PACKAGE_GENERATED:
666 self.package_details_page.set_title("Step 2 of 2: Edit packages")
667 if self.recipe_model.get_selected_image() == self.recipe_model.__custom_image__:
668 self.package_details_page.set_packages_curr_tab(self.package_details_page.ALL)
669 else:
670 self.package_details_page.set_packages_curr_tab(self.package_details_page.INCLUDED)
671 self.package_details_page.show_page(self.current_logfile)
672
673 elif next_step == self.IMAGE_GENERATING:
674 # after packages are generated, selected_packages need to
675 # be updated in package_model per selected_image in recipe_model
676 self.build_details_page.show_page(next_step)
677
678 elif next_step == self.IMAGE_GENERATED:
679 self.image_details_page.show_page(next_step)
680
681 elif next_step == self.MY_IMAGE_OPENED:
682 self.image_details_page.show_page(next_step)
683
684 self.previous_step = self.current_step
685 self.current_step = next_step
686
687 def set_user_config_proxies(self):
688 if self.configuration.enable_proxy == True:
689 self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
690 self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
691 self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
692 self.handler.set_socks_proxy(self.configuration.combine_proxy("socks"))
693 self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
694 elif self.configuration.enable_proxy == False:
695 self.handler.set_http_proxy("")
696 self.handler.set_https_proxy("")
697 self.handler.set_ftp_proxy("")
698 self.handler.set_socks_proxy("")
699 self.handler.set_cvs_proxy("", "")
700
701 def set_user_config_extra(self):
702 self.handler.set_rootfs_size(self.configuration.image_rootfs_size)
703 self.handler.set_extra_size(self.configuration.image_extra_size)
704 self.handler.set_incompatible_license(self.configuration.incompat_license)
705 self.handler.set_sdk_machine(self.configuration.curr_sdk_machine)
706 self.handler.set_image_fstypes(self.configuration.image_fstypes)
707 self.handler.set_extra_config(self.configuration.extra_setting)
708 self.handler.set_extra_inherit("packageinfo image_types")
709 self.set_user_config_proxies()
710
711 def set_user_config(self):
712 # set bb layers
713 self.handler.set_bblayers(self.configuration.layers)
714 # set local configuration
715 self.handler.set_machine(self.configuration.curr_mach)
716 self.handler.set_package_format(self.configuration.curr_package_format)
717 self.handler.set_distro(self.configuration.curr_distro)
718 self.handler.set_dl_dir(self.configuration.dldir)
719 self.handler.set_sstate_dir(self.configuration.sstatedir)
720 self.handler.set_sstate_mirrors(self.configuration.sstatemirror)
721 self.handler.set_pmake(self.configuration.pmake)
722 self.handler.set_bbthreads(self.configuration.bbthread)
723 self.set_user_config_extra()
724
725 def update_recipe_model(self, selected_image, selected_recipes):
726 self.recipe_model.set_selected_image(selected_image)
727 self.recipe_model.set_selected_recipes(selected_recipes)
728
729 def update_package_model(self, selected_packages, user_selected_packages=None):
730 if user_selected_packages:
731 left = self.package_model.set_selected_packages(user_selected_packages, True)
732 self.configuration.user_selected_packages += left
733 left = self.package_model.set_selected_packages(selected_packages)
734 self.configuration.selected_packages += left
735
736 def update_configuration_parameters(self, params):
737 if params:
738 self.configuration.update(params)
739 self.parameters.update(params)
740
741 def set_base_image(self):
742 self.configuration.initial_selected_image = self.configuration.selected_image
743 if self.configuration.selected_image != self.recipe_model.__custom_image__:
744 self.hob_image = self.configuration.selected_image + "-edited"
745
746 def reset(self):
747 self.configuration.curr_mach = ""
748 self.configuration.clear_selection()
749 self.image_configuration_page.switch_machine_combo()
750 self.switch_page(self.MACHINE_SELECTION)
751
752 # Callback Functions
753 def handler_config_updated_cb(self, handler, which, values):
754 if which == "distro":
755 self.parameters.all_distros = values
756 elif which == "machine":
757 self.parameters.all_machines = values
758 self.image_configuration_page.update_machine_combo()
759 elif which == "machine-sdk":
760 self.parameters.all_sdk_machines = values
761
762 def handler_package_formats_updated_cb(self, handler, formats):
763 self.parameters.all_package_formats = formats
764
765 def switch_to_image_configuration_helper(self):
766 self.sanity_check_page.stop()
767 self.switch_page(self.IMAGE_CONFIGURATION)
768 self.image_configuration_page.switch_machine_combo()
769
770 def show_network_error_dialog_helper(self):
771 self.sanity_check_page.stop()
772 self.show_network_error_dialog()
773
774 def handler_command_succeeded_cb(self, handler, initcmd):
775 if initcmd == self.handler.GENERATE_CONFIGURATION:
776 if not self.configuration.curr_mach:
777 self.configuration.curr_mach = self.handler.runCommand(["getVariable", "HOB_MACHINE"]) or ""
778 self.update_configuration_parameters(self.get_parameters_sync())
779 if not self.sanity_checked:
780 self.sanity_check()
781 self.sanity_checked = True
782 elif initcmd == self.handler.SANITY_CHECK:
783 if self.had_network_error:
784 self.had_network_error = False
785 self.execute_after_sanity_check(self.show_network_error_dialog_helper)
786 else:
787 # Switch to the 'image configuration' page now, but we might need
788 # to wait for the minimum display time of the sanity check page
789 self.execute_after_sanity_check(self.switch_to_image_configuration_helper)
790 elif initcmd in [self.handler.GENERATE_RECIPES,
791 self.handler.GENERATE_PACKAGES,
792 self.handler.GENERATE_IMAGE]:
793 self.update_configuration_parameters(self.get_parameters_sync())
794 self.request_package_info_async()
795 elif initcmd == self.handler.POPULATE_PACKAGEINFO:
796 if self.current_step == self.RCPPKGINFO_POPULATING:
797 self.switch_page(self.RCPPKGINFO_POPULATED)
798 self.rcppkglist_populated()
799 return
800
801 self.rcppkglist_populated()
802 if self.current_step == self.FAST_IMAGE_GENERATING:
803 self.generate_image_async(True)
804
805 def show_error_dialog(self, msg):
806 lbl = "<b>Hob found an error</b>"
807 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
808 button = dialog.add_button("Close", gtk.RESPONSE_OK)
809 HobButton.style_button(button)
810 response = dialog.run()
811 dialog.destroy()
812
813 def show_warning_dialog(self):
814 dialog = ParsingWarningsDialog(title = "View warnings",
815 warnings = self.parsing_warnings,
816 parent = None,
817 flags = gtk.DIALOG_DESTROY_WITH_PARENT
818 | gtk.DIALOG_NO_SEPARATOR)
819 response = dialog.run()
820 dialog.destroy()
821
822 def show_network_error_dialog(self):
823 lbl = "<b>Hob cannot connect to the network</b>"
824 msg = "Please check your network connection. If you are using a proxy server, please make sure it is configured correctly."
825 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
826 button = dialog.add_button("Close", gtk.RESPONSE_OK)
827 HobButton.style_button(button)
828 button = dialog.add_button("Proxy settings", gtk.RESPONSE_CANCEL)
829 HobButton.style_button(button)
830 res = dialog.run()
831 dialog.destroy()
832 if res == gtk.RESPONSE_CANCEL:
833 res, settings_changed = self.show_simple_settings_dialog(SimpleSettingsDialog.PROXIES_PAGE_ID)
834 if not res:
835 return
836 if settings_changed:
837 self.reparse_post_adv_settings()
838
839 def handler_command_failed_cb(self, handler, msg):
840 if msg:
841 self.show_error_dialog(msg)
842 self.reset()
843
844 def handler_parsing_warning_cb(self, handler, warn_msg):
845 self.parsing_warnings.append(warn_msg)
846
847 def handler_sanity_failed_cb(self, handler, msg, network_error):
848 self.reset()
849 if network_error:
850 # Mark this in an internal field. The "network error" dialog will be
851 # shown later, when a SanityCheckPassed event is handled
852 # (as sent by sanity.bbclass)
853 self.had_network_error = True
854 else:
855 msg = msg.replace("your local.conf", "Settings")
856 self.show_error_dialog(msg)
857 self.reset()
858
859 def window_sensitive(self, sensitive):
860 self.image_configuration_page.machine_combo.set_sensitive(sensitive)
861 self.image_configuration_page.machine_combo.child.set_sensitive(sensitive)
862 self.image_configuration_page.image_combo.set_sensitive(sensitive)
863 self.image_configuration_page.image_combo.child.set_sensitive(sensitive)
864 self.image_configuration_page.layer_button.set_sensitive(sensitive)
865 self.image_configuration_page.layer_info_icon.set_sensitive(sensitive)
866 self.image_configuration_page.toolbar.set_sensitive(sensitive)
867 self.image_configuration_page.view_adv_configuration_button.set_sensitive(sensitive)
868 self.image_configuration_page.config_build_button.set_sensitive(sensitive)
869
870 self.recipe_details_page.set_sensitive(sensitive)
871 self.package_details_page.set_sensitive(sensitive)
872 self.build_details_page.set_sensitive(sensitive)
873 self.image_details_page.set_sensitive(sensitive)
874
875 if sensitive:
876 self.window.set_cursor(None)
877 else:
878 self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
879 self.sensitive = sensitive
880
881
882 def handler_generating_data_cb(self, handler):
883 self.window_sensitive(False)
884
885 def handler_data_generated_cb(self, handler):
886 self.window_sensitive(True)
887
888 def rcppkglist_populated(self):
889 selected_image = self.configuration.selected_image
890 selected_recipes = self.configuration.selected_recipes[:]
891 selected_packages = self.configuration.selected_packages[:]
892 user_selected_packages = self.configuration.user_selected_packages[:]
893
894 self.image_configuration_page.update_image_combo(self.recipe_model, selected_image)
895 self.image_configuration_page.update_image_desc()
896 self.update_recipe_model(selected_image, selected_recipes)
897 self.update_package_model(selected_packages, user_selected_packages)
898
899 def recipelist_changed_cb(self, recipe_model):
900 self.recipe_details_page.refresh_selection()
901
902 def packagelist_changed_cb(self, package_model):
903 self.package_details_page.refresh_selection()
904
905 def handler_recipe_populated_cb(self, handler):
906 self.image_configuration_page.update_progress_bar("Populating recipes", 0.99)
907
908 def handler_package_populated_cb(self, handler):
909 self.image_configuration_page.update_progress_bar("Populating packages", 1.0)
910
911 def handler_parsing_started_cb(self, handler, message):
912 if self.current_step != self.RCPPKGINFO_POPULATING:
913 return
914
915 fraction = 0
916 if message["eventname"] == "TreeDataPreparationStarted":
917 fraction = 0.6 + fraction
918 self.image_configuration_page.stop_button.set_sensitive(False)
919 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
920 else:
921 self.image_configuration_page.stop_button.set_sensitive(True)
922 self.image_configuration_page.update_progress_bar(message["title"], fraction)
923
924 def handler_parsing_cb(self, handler, message):
925 if self.current_step != self.RCPPKGINFO_POPULATING:
926 return
927
928 fraction = message["current"] * 1.0/message["total"]
929 if message["eventname"] == "TreeDataPreparationProgress":
930 fraction = 0.6 + 0.38 * fraction
931 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
932 else:
933 fraction = 0.6 * fraction
934 self.image_configuration_page.update_progress_bar(message["title"], fraction)
935
936 def handler_parsing_completed_cb(self, handler, message):
937 if self.current_step != self.RCPPKGINFO_POPULATING:
938 return
939
940 if message["eventname"] == "TreeDataPreparationCompleted":
941 fraction = 0.98
942 else:
943 fraction = 0.6
944 self.image_configuration_page.update_progress_bar("Generating dependency tree", fraction)
945
946 def handler_build_started_cb(self, running_build):
947 if self.current_step == self.FAST_IMAGE_GENERATING:
948 fraction = 0
949 elif self.current_step == self.IMAGE_GENERATING:
950 if self.previous_step == self.FAST_IMAGE_GENERATING:
951 fraction = 0.9
952 else:
953 fraction = 0
954 elif self.current_step == self.PACKAGE_GENERATING:
955 fraction = 0
956 self.build_details_page.update_progress_bar("Build Started: ", fraction)
957 self.build_details_page.show_configurations(self.configuration, self.parameters)
958
959 def build_succeeded(self):
960 if self.current_step == self.FAST_IMAGE_GENERATING:
961 fraction = 0.9
962 elif self.current_step == self.IMAGE_GENERATING:
963 fraction = 1.0
964 version = ""
965 self.parameters.image_names = []
966 selected_image = self.recipe_model.get_selected_image()
967 if selected_image == self.recipe_model.__custom_image__:
968 if self.configuration.initial_selected_image != selected_image:
969 version = self.recipe_model.get_custom_image_version()
970 linkname = self.hob_image + version + "-" + self.configuration.curr_mach
971 else:
972 linkname = selected_image + '-' + self.configuration.curr_mach
973 image_extension = self.get_image_extension()
974 for image_type in self.parameters.image_types:
975 if image_type in image_extension:
976 real_types = image_extension[image_type]
977 else:
978 real_types = [image_type]
979 for real_image_type in real_types:
980 linkpath = self.parameters.image_addr + '/' + linkname + '.' + real_image_type
981 if os.path.exists(linkpath):
982 self.parameters.image_names.append(os.readlink(linkpath))
983 elif self.current_step == self.PACKAGE_GENERATING:
984 fraction = 1.0
985 self.build_details_page.update_progress_bar("Build Completed: ", fraction)
986 self.handler.build_succeeded_async()
987 self.stopping = False
988
989 if self.current_step == self.PACKAGE_GENERATING:
990 self.switch_page(self.PACKAGE_GENERATED)
991 elif self.current_step == self.IMAGE_GENERATING:
992 self.switch_page(self.IMAGE_GENERATED)
993
994 def build_failed(self):
995 if self.stopping:
996 status = "stop"
997 message = "Build stopped: "
998 fraction = self.build_details_page.progress_bar.get_fraction()
999 stop_to_next_edit = ""
1000 if self.current_step == self.FAST_IMAGE_GENERATING:
1001 stop_to_next_edit = "image configuration"
1002 elif self.current_step == self.IMAGE_GENERATING:
1003 if self.previous_step == self.FAST_IMAGE_GENERATING:
1004 stop_to_next_edit = "image configuration"
1005 else:
1006 stop_to_next_edit = "packages"
1007 elif self.current_step == self.PACKAGE_GENERATING:
1008 stop_to_next_edit = "recipes"
1009 button = self.build_details_page.show_stop_page(stop_to_next_edit.split(' ')[0])
1010 self.set_default(button)
1011 else:
1012 fail_to_next_edit = ""
1013 if self.current_step == self.FAST_IMAGE_GENERATING:
1014 fail_to_next_edit = "image configuration"
1015 fraction = 0.9
1016 elif self.current_step == self.IMAGE_GENERATING:
1017 if self.previous_step == self.FAST_IMAGE_GENERATING:
1018 fail_to_next_edit = "image configuration"
1019 else:
1020 fail_to_next_edit = "packages"
1021 fraction = 1.0
1022 elif self.current_step == self.PACKAGE_GENERATING:
1023 fail_to_next_edit = "recipes"
1024 fraction = 1.0
1025 self.build_details_page.show_fail_page(fail_to_next_edit.split(' ')[0])
1026 status = "fail"
1027 message = "Build failed: "
1028 self.build_details_page.update_progress_bar(message, fraction, status)
1029 self.build_details_page.show_back_button()
1030 self.build_details_page.hide_stop_button()
1031 self.handler.build_failed_async()
1032 self.stopping = False
1033
1034 def handler_build_succeeded_cb(self, running_build):
1035 if not self.stopping:
1036 self.build_succeeded()
1037 else:
1038 self.build_failed()
1039
1040
1041 def handler_build_failed_cb(self, running_build):
1042 self.build_failed()
1043
1044 def handler_build_aborted_cb(self, running_build):
1045 self.build_failed()
1046
1047 def handler_no_provider_cb(self, running_build, msg):
1048 dialog = CrumbsMessageDialog(self, glib.markup_escape_text(msg), gtk.MESSAGE_INFO)
1049 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1050 HobButton.style_button(button)
1051 dialog.run()
1052 dialog.destroy()
1053 self.build_failed()
1054
1055 def handler_task_started_cb(self, running_build, message):
1056 fraction = message["current"] * 1.0/message["total"]
1057 title = "Build packages"
1058 if self.current_step == self.FAST_IMAGE_GENERATING:
1059 if message["eventname"] == "sceneQueueTaskStarted":
1060 fraction = 0.27 * fraction
1061 elif message["eventname"] == "runQueueTaskStarted":
1062 fraction = 0.27 + 0.63 * fraction
1063 elif self.current_step == self.IMAGE_GENERATING:
1064 title = "Build image"
1065 if self.previous_step == self.FAST_IMAGE_GENERATING:
1066 if message["eventname"] == "sceneQueueTaskStarted":
1067 fraction = 0.27 + 0.63 + 0.03 * fraction
1068 elif message["eventname"] == "runQueueTaskStarted":
1069 fraction = 0.27 + 0.63 + 0.03 + 0.07 * fraction
1070 else:
1071 if message["eventname"] == "sceneQueueTaskStarted":
1072 fraction = 0.2 * fraction
1073 elif message["eventname"] == "runQueueTaskStarted":
1074 fraction = 0.2 + 0.8 * fraction
1075 elif self.current_step == self.PACKAGE_GENERATING:
1076 if message["eventname"] == "sceneQueueTaskStarted":
1077 fraction = 0.2 * fraction
1078 elif message["eventname"] == "runQueueTaskStarted":
1079 fraction = 0.2 + 0.8 * fraction
1080 self.build_details_page.update_progress_bar(title + ": ", fraction)
1081 self.build_details_page.update_build_status(message["current"], message["total"], message["task"])
1082
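handler_task_started_cb splits the progress bar into fixed bands per build phase; during FAST_IMAGE_GENERATING, for instance, the sstate (sceneQueue) tasks fill the first 27% and the runQueue tasks the next 63%, leaving the final 10% for the image-generation pass that follows. A small worked sketch of the arithmetic (the event payload shown is hypothetical):

    message = {"eventname": "runQueueTaskStarted", "current": 50, "total": 100, "task": "do_compile"}
    fraction = message["current"] * 1.0 / message["total"]    # 0.5
    fraction = 0.27 + 0.63 * fraction                         # 0.585 of the bar during fast image generation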
1083 def handler_disk_full_cb(self, running_build):
1084 self.disk_full = True
1085
1086 def handler_build_failure_cb(self, running_build):
1087 self.build_details_page.show_issues()
1088
1089 def handler_build_log_cb(self, running_build, func, obj):
1090 if hasattr(self.logger, func):
1091 getattr(self.logger, func)(obj)
1092
1093 def destroy_window_cb(self, widget, event):
1094 if not self.sensitive:
1095 return True
1096 elif self.handler.building:
1097 self.stop_build()
1098 return True
1099 else:
1100 gtk.main_quit()
1101
1102 def event_handle_SIGINT(self, signal, frame):
1103 for w in gtk.window_list_toplevels():
1104 if w.get_modal():
1105 w.response(gtk.RESPONSE_DELETE_EVENT)
1106 sys.exit(0)
1107
1108 def build_packages(self):
1109 _, all_recipes = self.recipe_model.get_selected_recipes()
1110 if not all_recipes:
1111 lbl = "<b>No selections made</b>"
1112 msg = "You have not made any selections"
1113 msg = msg + " so there isn't anything to bake at this time."
1114 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1115 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1116 HobButton.style_button(button)
1117 dialog.run()
1118 dialog.destroy()
1119 return
1120 self.generate_packages_async(True)
1121
1122 def build_image(self):
1123 selected_packages = self.package_model.get_selected_packages()
1124 if not selected_packages:
1125 lbl = "<b>No selections made</b>"
1126 msg = "You have not made any selections"
1127 msg = msg + " so there isn't anything to bake at this time."
1128 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1129 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1130 HobButton.style_button(button)
1131 dialog.run()
1132 dialog.destroy()
1133 return
1134 self.generate_image_async(True)
1135
1136 def just_bake(self):
1137 selected_image = self.recipe_model.get_selected_image()
1138 selected_packages = self.package_model.get_selected_packages() or []
1139
1140 # If there is no base image and no selected packages, don't build anything
1141 if not (selected_packages or selected_image != self.recipe_model.__custom_image__):
1142 lbl = "<b>No selections made</b>"
1143 msg = "You have not made any selections"
1144 msg = msg + " so there isn't anything to bake at this time."
1145 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1146 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1147 HobButton.style_button(button)
1148 dialog.run()
1149 dialog.destroy()
1150 return
1151
1152 self.fast_generate_image_async(True)
1153
1154 def show_recipe_property_dialog(self, properties):
1155 information = {}
1156 dialog = PropertyDialog(title = properties["name"] +' '+ "properties",
1157 parent = self,
1158 information = properties,
1159 flags = gtk.DIALOG_DESTROY_WITH_PARENT
1160 | gtk.DIALOG_NO_SEPARATOR)
1161
1162 dialog.set_modal(False)
1163
1164 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1165 HobAltButton.style_button(button)
1166 button.connect("clicked", lambda w: dialog.destroy())
1167
1168 dialog.run()
1169
1170 def show_packages_property_dialog(self, properties):
1171 information = {}
1172 dialog = PropertyDialog(title = properties["name"] +' '+ "properties",
1173 parent = self,
1174 information = properties,
1175 flags = gtk.DIALOG_DESTROY_WITH_PARENT
1176 | gtk.DIALOG_NO_SEPARATOR)
1177
1178 dialog.set_modal(False)
1179
1180 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1181 HobAltButton.style_button(button)
1182 button.connect("clicked", lambda w: dialog.destroy())
1183
1184 dialog.run()
1185
1186 def show_layer_selection_dialog(self):
1187 dialog = LayerSelectionDialog(title = "Layers",
1188 layers = copy.deepcopy(self.configuration.layers),
1189 layers_non_removable = copy.deepcopy(self.configuration.layers_non_removable),
1190 all_layers = self.parameters.all_layers,
1191 parent = self,
1192 flags = gtk.DIALOG_MODAL
1193 | gtk.DIALOG_DESTROY_WITH_PARENT
1194 | gtk.DIALOG_NO_SEPARATOR)
1195 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1196 HobAltButton.style_button(button)
1197 button = dialog.add_button("OK", gtk.RESPONSE_YES)
1198 HobButton.style_button(button)
1199 response = dialog.run()
1200 if response == gtk.RESPONSE_YES:
1201 self.configuration.layers = dialog.layers
1202 # DO refresh layers
1203 if dialog.layers_changed:
1204 self.update_config_async()
1205 dialog.destroy()
1206
1207 def get_image_extension(self):
1208 image_extension = {}
1209 for type in self.parameters.image_types:
1210 ext = self.handler.runCommand(["getVariable", "IMAGE_EXTENSION_%s" % type])
1211 if ext:
1212 image_extension[type] = ext.split(' ')
1213
1214 return image_extension
1215
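get_image_extension maps each configured image type to the real file extensions that end up on disk, by reading the IMAGE_EXTENSION_<type> bitbake variable. A hedged sketch of the mapping build_succeeded relies on (the "live" value below is only an assumed example, not read from the metadata here):

    # Assuming the metadata sets IMAGE_EXTENSION_live = "hddimg iso":
    #   self.get_image_extension()  ->  {"live": ["hddimg", "iso"]}
    # build_succeeded() would then look for the <image_addr>/<linkname>.hddimg
    # and <linkname>.iso symlinks rather than <linkname>.live.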
1216 def show_load_my_images_dialog(self):
1217 image_extension = self.get_image_extension()
1218 dialog = ImageSelectionDialog(self.parameters.image_addr, self.parameters.image_types,
1219 "Open My Images", self,
1220 gtk.FILE_CHOOSER_ACTION_SAVE, None,
1221 image_extension)
1222 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1223 HobAltButton.style_button(button)
1224 button = dialog.add_button("Open", gtk.RESPONSE_YES)
1225 HobButton.style_button(button)
1226 response = dialog.run()
1227 if response == gtk.RESPONSE_YES:
1228 if not dialog.image_names:
1229 lbl = "<b>No selections made</b>"
1230 msg = "You have not made any selections"
1231 crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
1232 button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
1233 HobButton.style_button(button)
1234 crumbs_dialog.run()
1235 crumbs_dialog.destroy()
1236 dialog.destroy()
1237 return
1238
1239 self.parameters.image_addr = dialog.image_folder
1240 self.parameters.image_names = dialog.image_names[:]
1241 self.switch_page(self.MY_IMAGE_OPENED)
1242
1243 dialog.destroy()
1244
1245 def show_adv_settings_dialog(self, tab=None):
1246 dialog = AdvancedSettingsDialog(title = "Advanced configuration",
1247 configuration = copy.deepcopy(self.configuration),
1248 all_image_types = self.parameters.image_types,
1249 all_package_formats = self.parameters.all_package_formats,
1250 all_distros = self.parameters.all_distros,
1251 all_sdk_machines = self.parameters.all_sdk_machines,
1252 max_threads = self.parameters.max_threads,
1253 parent = self,
1254 flags = gtk.DIALOG_MODAL
1255 | gtk.DIALOG_DESTROY_WITH_PARENT
1256 | gtk.DIALOG_NO_SEPARATOR)
1257 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1258 HobAltButton.style_button(button)
1259 button = dialog.add_button("Save", gtk.RESPONSE_YES)
1260 HobButton.style_button(button)
1261 dialog.set_save_button(button)
1262 response = dialog.run()
1263 settings_changed = False
1264 if response == gtk.RESPONSE_YES:
1265 self.configuration = dialog.configuration
1266 self.configuration.save(self.handler, True) # remember settings
1267 settings_changed = dialog.settings_changed
1268 dialog.destroy()
1269 return response == gtk.RESPONSE_YES, settings_changed
1270
1271 def show_simple_settings_dialog(self, tab=None):
1272 dialog = SimpleSettingsDialog(title = "Settings",
1273 configuration = copy.deepcopy(self.configuration),
1274 all_image_types = self.parameters.image_types,
1275 all_package_formats = self.parameters.all_package_formats,
1276 all_distros = self.parameters.all_distros,
1277 all_sdk_machines = self.parameters.all_sdk_machines,
1278 max_threads = self.parameters.max_threads,
1279 parent = self,
1280 flags = gtk.DIALOG_MODAL
1281 | gtk.DIALOG_DESTROY_WITH_PARENT
1282 | gtk.DIALOG_NO_SEPARATOR,
1283 handler = self.handler)
1284 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1285 HobAltButton.style_button(button)
1286 button = dialog.add_button("Save", gtk.RESPONSE_YES)
1287 HobButton.style_button(button)
1288 if tab:
1289 dialog.switch_to_page(tab)
1290 response = dialog.run()
1291 settings_changed = False
1292 if response == gtk.RESPONSE_YES:
1293 self.configuration = dialog.configuration
1294 self.configuration.save(self.handler, True) # remember settings
1295 settings_changed = dialog.settings_changed
1296 if dialog.proxy_settings_changed:
1297 self.set_user_config_proxies()
1298 elif dialog.proxy_test_ran:
1299 # The user might have modified the proxies in the "Proxy"
1300 # tab, which in turn made the proxy settings modify in bb.
1301 # If "Cancel" was pressed, restore the previous proxy
1302 # settings inside bb.
1303 self.set_user_config_proxies()
1304 dialog.destroy()
1305 return response == gtk.RESPONSE_YES, settings_changed
1306
1307 def reparse_post_adv_settings(self):
1308 if not self.configuration.curr_mach:
1309 self.update_config_async()
1310 else:
1311 self.configuration.clear_selection()
1312 # DO reparse recipes
1313 self.populate_recipe_package_info_async()
1314
1315 def deploy_image(self, image_name):
1316 if not image_name:
1317 lbl = "<b>Please select an image to deploy.</b>"
1318 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO)
1319 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1320 HobButton.style_button(button)
1321 dialog.run()
1322 dialog.destroy()
1323 return
1324
1325 image_path = os.path.join(self.parameters.image_addr, image_name)
1326 dialog = DeployImageDialog(title = "Usb Image Maker",
1327 image_path = image_path,
1328 parent = self,
1329 flags = gtk.DIALOG_MODAL
1330 | gtk.DIALOG_DESTROY_WITH_PARENT
1331 | gtk.DIALOG_NO_SEPARATOR)
1332 button = dialog.add_button("Close", gtk.RESPONSE_NO)
1333 HobAltButton.style_button(button)
1334 button = dialog.add_button("Make usb image", gtk.RESPONSE_YES)
1335 HobButton.style_button(button)
1336 response = dialog.run()
1337 dialog.destroy()
1338
1339 def show_load_kernel_dialog(self):
1340 dialog = gtk.FileChooserDialog("Load Kernel Files", self,
1341 gtk.FILE_CHOOSER_ACTION_SAVE)
1342 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
1343 HobAltButton.style_button(button)
1344 button = dialog.add_button("Open", gtk.RESPONSE_YES)
1345 HobButton.style_button(button)
1346 filter = gtk.FileFilter()
1347 filter.set_name("Kernel Files")
1348 filter.add_pattern("*.bin")
1349 dialog.add_filter(filter)
1350
1351 dialog.set_current_folder(self.parameters.image_addr)
1352
1353 response = dialog.run()
1354 kernel_path = ""
1355 if response == gtk.RESPONSE_YES:
1356 kernel_path = dialog.get_filename()
1357
1358 dialog.destroy()
1359
1360 return kernel_path
1361
1362 def runqemu_image(self, image_name, kernel_name):
1363 if not image_name or not kernel_name:
1364 lbl = "<b>Please select %s to launch in QEMU.</b>" % ("a kernel" if image_name else "an image")
1365 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO)
1366 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1367 HobButton.style_button(button)
1368 dialog.run()
1369 dialog.destroy()
1370 return
1371
1372 kernel_path = os.path.join(self.parameters.image_addr, kernel_name)
1373 image_path = os.path.join(self.parameters.image_addr, image_name)
1374
1375 source_env_path = os.path.join(self.parameters.core_base, "oe-init-build-env")
1376 tmp_path = self.parameters.tmpdir
1377 cmdline = bb.ui.crumbs.utils.which_terminal()
1378 if os.path.exists(image_path) and os.path.exists(kernel_path) \
1379 and os.path.exists(source_env_path) and os.path.exists(tmp_path) \
1380 and cmdline:
1381 cmdline += "\' bash -c \"export OE_TMPDIR=" + tmp_path + "; "
1382 cmdline += "source " + source_env_path + " " + os.getcwd() + "; "
1383 cmdline += "runqemu " + kernel_path + " " + image_path + "\"\'"
1384 subprocess.Popen(shlex.split(cmdline))
1385 else:
1386 lbl = "<b>Path error</b>"
1387 msg = "One of your paths is wrong,"
1388 msg = msg + " please make sure the following paths exist:\n"
1389 msg = msg + "image path:" + image_path + "\n"
1390 msg = msg + "kernel path:" + kernel_path + "\n"
1391 msg = msg + "source environment path:" + source_env_path + "\n"
1392 msg = msg + "tmp path: " + tmp_path + "."
1393 msg = msg + "You may be missing either xterm or vte for terminal services."
1394 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
1395 button = dialog.add_button("Close", gtk.RESPONSE_OK)
1396 HobButton.style_button(button)
1397 dialog.run()
1398 dialog.destroy()
1399
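runqemu_image only launches QEMU when every path and a usable terminal command are present; the terminal prefix comes from bb.ui.crumbs.utils.which_terminal(). Assuming which_terminal() yields something like "xterm -e '" (an assumption; the exact value depends on which terminal emulator is installed), the assembled command is roughly:

    xterm -e ' bash -c "export OE_TMPDIR=<tmp_path>; source <core_base>/oe-init-build-env <cwd>; runqemu <kernel_path> <image_path>"'

with the angle-bracketed parts taken from self.parameters and os.getcwd().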
1400 def show_packages(self):
1401 self.package_details_page.refresh_tables()
1402 self.switch_page(self.PACKAGE_SELECTION)
1403
1404 def show_recipes(self):
1405 self.switch_page(self.RECIPE_SELECTION)
1406
1407 def show_image_details(self):
1408 self.switch_page(self.IMAGE_GENERATED)
1409
1410 def show_configuration(self):
1411 self.switch_page(self.BASEIMG_SELECTED)
1412
1413 def stop_build(self):
1414 if self.stopping:
1415 lbl = "<b>Force Stop build?</b>"
1416 msg = "You've already selected Stop once,"
1417 msg = msg + " would you like to 'Force Stop' the build?\n\n"
1418 msg = msg + "This will stop the build as quickly as possible but may"
1419 msg = msg + " well leave your build directory in an unusable state"
1420 msg = msg + " that requires manual steps to fix."
1421 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
1422 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
1423 HobAltButton.style_button(button)
1424 button = dialog.add_button("Force Stop", gtk.RESPONSE_YES)
1425 HobButton.style_button(button)
1426 else:
1427 lbl = "<b>Stop build?</b>"
1428 msg = "Are you sure you want to stop this"
1429 msg = msg + " build?\n\n'Stop' will stop the build as soon as all in"
1430 msg = msg + " progress build tasks are finished. However if a"
1431 msg = msg + " lengthy compilation phase is in progress this may take"
1432 msg = msg + " some time.\n\n"
1433 msg = msg + "'Force Stop' will stop the build as quickly as"
1434 msg = msg + " possible but may well leave your build directory in an"
1435 msg = msg + " unusable state that requires manual steps to fix."
1436 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
1437 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
1438 HobAltButton.style_button(button)
1439 button = dialog.add_button("Force stop", gtk.RESPONSE_YES)
1440 HobAltButton.style_button(button)
1441 button = dialog.add_button("Stop", gtk.RESPONSE_OK)
1442 HobButton.style_button(button)
1443 response = dialog.run()
1444 dialog.destroy()
1445 if response != gtk.RESPONSE_CANCEL:
1446 self.stopping = True
1447 if response == gtk.RESPONSE_OK:
1448 self.build_details_page.progress_bar.set_stop_title("Stopping the build....")
1449 self.build_details_page.progress_bar.set_rcstyle("stop")
1450 self.cancel_build_sync()
1451 elif response == gtk.RESPONSE_YES:
1452 self.cancel_build_sync(True)
1453
1454 def do_log(self, consolelogfile = None):
1455 if consolelogfile:
1456 bb.utils.mkdirhier(os.path.dirname(consolelogfile))
1457 if self.consolelog:
1458 self.logger.removeHandler(self.consolelog)
1459 self.consolelog = None
1460 self.consolelog = logging.FileHandler(consolelogfile)
1461 bb.msg.addDefaultlogFilter(self.consolelog)
1462 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
1463 self.consolelog.setFormatter(format)
1464
1465 self.logger.addHandler(self.consolelog)
1466
1467 def get_topdir(self):
1468 return self.handler.get_topdir()
1469
1470 def wait(self, delay):
1471 time_start = time.time()
1472 time_end = time_start + delay
1473 while time_end > time.time():
1474 while gtk.events_pending():
1475 gtk.main_iteration()
diff --git a/bitbake/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
new file mode 100644
index 0000000000..e858d75e4c
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/buildmanager.py
@@ -0,0 +1,455 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23import threading
24import os
25import datetime
26import time
27
28class BuildConfiguration:
29 """ Represents a potential *or* historic *or* concrete build. It
30 encompasses all the things that we need to tell bitbake to do to make it
31 build what we want it to build.
32
33 It also stores the metadata URL and the set of possible machines (and the
34 distros / images / uris for these). Apart from the metadata URL these are
35 not serialised to file (since they may be transient). In some ways this
36 functionality might be shifted to the loader class."""
37
38 def __init__ (self):
39 self.metadata_url = None
40
41 # Tuple of (distros, image, urls)
42 self.machine_options = {}
43
44 self.machine = None
45 self.distro = None
46 self.image = None
47 self.urls = []
48 self.extra_urls = []
49 self.extra_pkgs = []
50
51 def get_machines_model (self):
52 model = gtk.ListStore (gobject.TYPE_STRING)
53 for machine in self.machine_options.keys():
54 model.append ([machine])
55
56 return model
57
58 def get_distro_and_images_models (self, machine):
59 distro_model = gtk.ListStore (gobject.TYPE_STRING)
60
61 for distro in self.machine_options[machine][0]:
62 distro_model.append ([distro])
63
64 image_model = gtk.ListStore (gobject.TYPE_STRING)
65
66 for image in self.machine_options[machine][1]:
67 image_model.append ([image])
68
69 return (distro_model, image_model)
70
71 def get_repos (self):
72 self.urls = self.machine_options[self.machine][2]
73 return self.urls
74
75 # It might be better if we stored these in something like the bitbake conf
76 # file format. (A short example of the format follows write_to_file below.)
77 @staticmethod
78 def load_from_file (filename):
79
80 conf = BuildConfiguration()
81 with open(filename, "r") as f:
82 for line in f:
83 data = line.split (";")[1]
84 if (line.startswith ("metadata-url;")):
85 conf.metadata_url = data.strip()
86 continue
87 if (line.startswith ("url;")):
88 conf.urls += [data.strip()]
89 continue
90 if (line.startswith ("extra-url;")):
91 conf.extra_urls += [data.strip()]
92 continue
93 if (line.startswith ("machine;")):
94 conf.machine = data.strip()
95 continue
96 if (line.startswith ("distribution;")):
97 conf.distro = data.strip()
98 continue
99 if (line.startswith ("image;")):
100 conf.image = data.strip()
101 continue
102
103 return conf
104
105 # Serialise to a file. This is part of the build process and we use this
106 # to be able to repeat a given build (using the same set of parameters)
107 # but also so that we can include the details of the image / machine /
108 # distro in the build manager tree view.
109 def write_to_file (self, filename):
110 f = open (filename, "w")
111
112 lines = []
113
114 if (self.metadata_url):
115 lines += ["metadata-url;%s\n" % (self.metadata_url)]
116
117 for url in self.urls:
118 lines += ["url;%s\n" % (url)]
119
120 for url in self.extra_urls:
121 lines += ["extra-url;%s\n" % (url)]
122
123 if (self.machine):
124 lines += ["machine;%s\n" % (self.machine)]
125
126 if (self.distro):
127 lines += ["distribution;%s\n" % (self.distro)]
128
129 if (self.image):
130 lines += ["image;%s\n" % (self.image)]
131
132 f.writelines (lines)
133 f.close ()
134
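The serialisation above is a simple one-record-per-line "key;value" format. A minimal round-trip sketch with hypothetical values (the path and URLs are assumptions, not taken from any real build):

    conf = BuildConfiguration()
    conf.metadata_url = "http://example.com/metadata.git"
    conf.machine = "qemux86"
    conf.distro = "defaultsetup"
    conf.image = "core-image-minimal"
    conf.urls = ["http://example.com/feed"]
    conf.write_to_file("/tmp/example.conf")
    # /tmp/example.conf now contains:
    #   metadata-url;http://example.com/metadata.git
    #   url;http://example.com/feed
    #   machine;qemux86
    #   distribution;defaultsetup
    #   image;core-image-minimal
    restored = BuildConfiguration.load_from_file("/tmp/example.conf")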
135class BuildResult(gobject.GObject):
136 """ Represents an historic build. Perhaps not successful. But it includes
137 things such as the files that are in the directory (the output from the
138 build) as well as a deserialised BuildConfiguration file that is stored in
139 ".conf" in the directory for the build.
140
141 This is GObject so that it can be included in the TreeStore."""
142
143 (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
144 (0, 1, 2)
145
146 def __init__ (self, parent, identifier):
147 gobject.GObject.__init__ (self)
148 self.date = None
149
150 self.files = []
151 self.status = None
152 self.identifier = identifier
153 self.path = os.path.join (parent, identifier)
154
155 # Extract the date. Since the directory name is of the
156 # format build-<year><month><day>-<ordinal> we can easily
157 # pull it out.
158 # TODO: Better to stat a file?
159 (_, date, revision) = identifier.split ("-")
160 print(date)
161
162 year = int (date[0:4])
163 month = int (date[4:6])
164 day = int (date[6:8])
165
166 self.date = datetime.date (year, month, day)
167
168 self.conf = None
169
170 # By default builds are STATE_FAILED unless we find a "complete" file
171 # in which case they are STATE_COMPLETE
172 self.state = BuildResult.STATE_FAILED
173 for file in os.listdir (self.path):
174 if (file.startswith (".conf")):
175 conffile = os.path.join (self.path, file)
176 self.conf = BuildConfiguration.load_from_file (conffile)
177 elif (file.startswith ("complete")):
178 self.state = BuildResult.STATE_COMPLETE
179 else:
180 self.add_file (file)
181
182 def add_file (self, file):
183 # Just add the file for now. Don't care about the type.
184 self.files += [(file, None)]
185
186class BuildManagerModel (gtk.TreeStore):
187 """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
188 but it abstracts nicely what the columns mean and the setup of the columns
189 in the model. """
190
191 (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
192 (0, 1, 2, 3, 4, 5, 6)
193
194 def __init__ (self):
195 gtk.TreeStore.__init__ (self,
196 gobject.TYPE_STRING,
197 gobject.TYPE_STRING,
198 gobject.TYPE_STRING,
199 gobject.TYPE_STRING,
200 gobject.TYPE_OBJECT,
201 gobject.TYPE_INT64,
202 gobject.TYPE_INT)
203
204class BuildManager (gobject.GObject):
205 """ This class manages the historic builds that have been found in the
206 "results" directory but is also used for starting a new build."""
207
208 __gsignals__ = {
209 'population-finished' : (gobject.SIGNAL_RUN_LAST,
210 gobject.TYPE_NONE,
211 ()),
212 'populate-error' : (gobject.SIGNAL_RUN_LAST,
213 gobject.TYPE_NONE,
214 ())
215 }
216
217 def update_build_result (self, result, iter):
218 # Convert the date into something we can sort by.
219 date = long (time.mktime (result.date.timetuple()))
220
221 # Add a top level entry for the build
222
223 self.model.set (iter,
224 BuildManagerModel.COL_IDENT, result.identifier,
225 BuildManagerModel.COL_DESC, result.conf.image,
226 BuildManagerModel.COL_MACHINE, result.conf.machine,
227 BuildManagerModel.COL_DISTRO, result.conf.distro,
228 BuildManagerModel.COL_BUILD_RESULT, result,
229 BuildManagerModel.COL_DATE, date,
230 BuildManagerModel.COL_STATE, result.state)
231
232 # And then we use the files in the directory as the children for the
233 # top level iter.
234 for file in result.files:
235 self.model.append (iter, (None, file[0], None, None, None, date, -1))
236
237 # This function is called as an idle by the BuildManagerPopulaterThread
238 def add_build_result (self, result):
239 gtk.gdk.threads_enter()
240 self.known_builds += [result]
241
242 self.update_build_result (result, self.model.append (None))
243
244 gtk.gdk.threads_leave()
245
246 def notify_build_finished (self):
247 # This is a bit of a hack. If we have a running build then we
248 # will have a row in the model in STATE_ONGOING. Find it and treat it
249 # as a proper historic build (it has completed now).
250
251 # We need to use the iters here rather than the Python iterator
252 # interface to the model since we need to pass it into
253 # update_build_result
254
255 iter = self.model.get_iter_first()
256
257 while (iter):
258 (ident, state) = self.model.get(iter,
259 BuildManagerModel.COL_IDENT,
260 BuildManagerModel.COL_STATE)
261
262 if state == BuildResult.STATE_ONGOING:
263 result = BuildResult (self.results_directory, ident)
264 self.update_build_result (result, iter)
265 iter = self.model.iter_next(iter)
266
267 def notify_build_succeeded (self):
268 # Write the "complete" file so that when we create the BuildResult
269 # object we put into the model
270
271 complete_file_path = os.path.join (self.cur_build_directory, "complete")
272 f = file (complete_file_path, "w")
273 f.close()
274 self.notify_build_finished()
275
276 def notify_build_failed (self):
277 # Without a "complete" file this will mark the build as failed:
278 self.notify_build_finished()
279
280 # This function is called as an idle
281 def emit_population_finished_signal (self):
282 gtk.gdk.threads_enter()
283 self.emit ("population-finished")
284 gtk.gdk.threads_leave()
285
286 class BuildManagerPopulaterThread (threading.Thread):
287 def __init__ (self, manager, directory):
288 threading.Thread.__init__ (self)
289 self.manager = manager
290 self.directory = directory
291
292 def run (self):
293 # For each of the "build-<...>" directories ..
294
295 if os.path.exists (self.directory):
296 for directory in os.listdir (self.directory):
297
298 if not directory.startswith ("build-"):
299 continue
300
301 build_result = BuildResult (self.directory, directory)
302 self.manager.add_build_result (build_result)
303
304 gobject.idle_add (BuildManager.emit_population_finished_signal,
305 self.manager)
306
307 def __init__ (self, server, results_directory):
308 gobject.GObject.__init__ (self)
309
310 # The builds that we've found from walking the result directory
311 self.known_builds = []
312
313 # Save out the bitbake server, we need this for issuing commands to
314 # the cooker:
315 self.server = server
316
317 # The TreeStore that we use
318 self.model = BuildManagerModel ()
319
320 # The results directory is where we create (and look for) the
321 # build-<xyz>-<n> directories. We need to populate ourselves from
322 # that directory.
323 self.results_directory = results_directory
324 self.populate_from_directory (self.results_directory)
325
326 def populate_from_directory (self, directory):
327 thread = BuildManager.BuildManagerPopulaterThread (self, directory)
328 thread.start()
329
330 # Come up with the name for the next build ident by combining "build-"
331 # with the date formatted as yyyymmdd and then an ordinal. We do this with
332 # an optimistic algorithm, incrementing the ordinal if the name already
333 # exists. (A short example of the resulting names follows this method.)
334 def get_next_build_ident (self):
335 today = datetime.date.today ()
336 datestr = today.strftime ("%Y%m%d")
337
338 revision = 0
339 test_name = "build-%s-%d" % (datestr, revision)
340 test_path = os.path.join (self.results_directory, test_name)
341
342 while (os.path.exists (test_path)):
343 revision += 1
344 test_name = "build-%s-%d" % (datestr, revision)
345 test_path = os.path.join (self.results_directory, test_name)
346
347 return test_name
348
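A short sketch of the idents this produces, assuming a hypothetical results directory that already holds one build from the same day (server stands for whatever bitbake server proxy the caller has):

    manager = BuildManager(server, "/tmp/results")
    os.makedirs("/tmp/results/build-20151009-0")    # pretend one build already exists today
    manager.get_next_build_ident()                  # -> "build-20151009-1" on 2015-10-09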
349 # Take a BuildConfiguration and then try to build it based on the
350 # parameters of that configuration.
351 def do_build (self, conf):
352 server = self.server
353
354 # Work out the build directory. Note we actually create the
355 # directories here since we need to write the ".conf" file. Otherwise
356 # we could have relied on bitbake's builder thread to actually make
357 # the directories as it proceeds with the build.
358 ident = self.get_next_build_ident ()
359 build_directory = os.path.join (self.results_directory,
360 ident)
361 self.cur_build_directory = build_directory
362 os.makedirs (build_directory)
363
364 conffile = os.path.join (build_directory, ".conf")
365 conf.write_to_file (conffile)
366
367 # Add a row to the model representing this ongoing build. It's a
368 # placeholder entry. If this build completes or fails it gets updated
369 # with the real data, like the historic builds.
370 date = long (time.time())
371 self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
372 None, date, BuildResult.STATE_ONGOING))
373 try:
374 server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
375 server.runCommand(["setVariable", "MACHINE", conf.machine])
376 server.runCommand(["setVariable", "DISTRO", conf.distro])
377 server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
378 server.runCommand(["setVariable", "BBFILES", \
379 """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
380 server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
381 server.runCommand(["setVariable", "IPK_FEED_URIS", \
382 " ".join(conf.get_repos())])
383 server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
384 build_directory])
385 server.runCommand(["buildTargets", [conf.image], "rootfs"])
386
387 except Exception as e:
388 print(e)
389
390class BuildManagerTreeView (gtk.TreeView):
391 """ The tree view for the build manager. This shows the historic builds
392 and so forth. """
393
394 # We use this function to control what goes in the cell since we store
395 # the date in the model as seconds since the epoch (for sorting) and so we
396 # need to make it human readable.
397 def date_format_custom_cell_data_func (self, col, cell, model, iter):
398 date = model.get (iter, BuildManagerModel.COL_DATE)[0]
399 datestr = time.strftime("%A %d %B %Y", time.localtime(date))
400 cell.set_property ("text", datestr)
401
402 # This format function controls what goes in the cell. We use this to map
403 # the integer state to a string and also to colourise the text
404 def state_format_custom_cell_data_fun (self, col, cell, model, iter):
405 state = model.get (iter, BuildManagerModel.COL_STATE)[0]
406
407 if (state == BuildResult.STATE_ONGOING):
408 cell.set_property ("text", "Active")
409 cell.set_property ("foreground", "#000000")
410 elif (state == BuildResult.STATE_FAILED):
411 cell.set_property ("text", "Failed")
412 cell.set_property ("foreground", "#ff0000")
413 elif (state == BuildResult.STATE_COMPLETE):
414 cell.set_property ("text", "Complete")
415 cell.set_property ("foreground", "#00ff00")
416 else:
417 cell.set_property ("text", "")
418
419 def __init__ (self):
420 gtk.TreeView.__init__(self)
421
422 # Miscellaneous description column
423 renderer = gtk.CellRendererText ()
424 col = gtk.TreeViewColumn (None, renderer,
425 text=BuildManagerModel.COL_DESC)
426 self.append_column (col)
427
428 # Machine
429 renderer = gtk.CellRendererText ()
430 col = gtk.TreeViewColumn ("Machine", renderer,
431 text=BuildManagerModel.COL_MACHINE)
432 self.append_column (col)
433
434 # distro
435 renderer = gtk.CellRendererText ()
436 col = gtk.TreeViewColumn ("Distribution", renderer,
437 text=BuildManagerModel.COL_DISTRO)
438 self.append_column (col)
439
440 # date (using a custom function for formatting the cell contents:
441 # it takes epoch -> human readable string)
442 renderer = gtk.CellRendererText ()
443 col = gtk.TreeViewColumn ("Date", renderer,
444 text=BuildManagerModel.COL_DATE)
445 self.append_column (col)
446 col.set_cell_data_func (renderer,
447 self.date_format_custom_cell_data_func)
448
449 # For status.
450 renderer = gtk.CellRendererText ()
451 col = gtk.TreeViewColumn ("Status", renderer,
452 text = BuildManagerModel.COL_STATE)
453 self.append_column (col)
454 col.set_cell_data_func (renderer,
455 self.state_format_custom_cell_data_fun)
diff --git a/bitbake/lib/bb/ui/crumbs/hig/__init__.py b/bitbake/lib/bb/ui/crumbs/hig/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/__init__.py
diff --git a/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py
new file mode 100644
index 0000000000..e0b3553c2f
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py
@@ -0,0 +1,341 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import hashlib
25from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton
26from bb.ui.crumbs.progressbar import HobProgressBar
27from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
28from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog
31
32"""
33The following are convenience classes for implementing GNOME HIG compliant
34BitBake GUIs.
35In summary: spacing = 12px, border-width = 6px
36"""
37
38class AdvancedSettingsDialog (CrumbsDialog, SettingsUIHelper):
39
40 def details_cb(self, button, parent, protocol):
41 dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
42 user = self.configuration.proxies[protocol][1],
43 passwd = self.configuration.proxies[protocol][2],
44 parent = parent,
45 flags = gtk.DIALOG_MODAL
46 | gtk.DIALOG_DESTROY_WITH_PARENT
47 | gtk.DIALOG_NO_SEPARATOR)
48 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
49 response = dialog.run()
50 if response == gtk.RESPONSE_OK:
51 self.configuration.proxies[protocol][1] = dialog.user
52 self.configuration.proxies[protocol][2] = dialog.passwd
53 self.refresh_proxy_components()
54 dialog.destroy()
55
56 def set_save_button(self, button):
57 self.save_button = button
58
59 def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
60 combo_item = self.rootfs_combo.get_active_text()
61 modified = False
62 for child in check_hbox.get_children():
63 if isinstance(child, gtk.CheckButton):
64 check_hbox.remove(child)
65 modified = True
66 for format in all_package_format:
67 if format != combo_item:
68 check_button = gtk.CheckButton(format)
69 check_hbox.pack_start(check_button, expand=False, fill=False)
70 modified = True
71 if modified:
72 check_hbox.remove(self.pkgfmt_info)
73 check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
74 check_hbox.show_all()
75
76 def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
77 pkgfmt_vbox = gtk.VBox(False, 6)
78
79 label = self.gen_label_widget("Root file system package format")
80 pkgfmt_vbox.pack_start(label, expand=False, fill=False)
81
82 rootfs_format = ""
83 if curr_package_format:
84 rootfs_format = curr_package_format.split()[0]
85
86 rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
87 pkgfmt_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)
88
89 label = self.gen_label_widget("Additional package formats")
90 pkgfmt_vbox.pack_start(label, expand=False, fill=False)
91
92 check_hbox = gtk.HBox(False, 12)
93 pkgfmt_vbox.pack_start(check_hbox, expand=False, fill=False)
94 for format in all_package_format:
95 if format != rootfs_format:
96 check_button = gtk.CheckButton(format)
97 is_active = (format in curr_package_format.split())
98 check_button.set_active(is_active)
99 check_hbox.pack_start(check_button, expand=False, fill=False)
100
101 self.pkgfmt_info = HobInfoButton(tooltip_extra, self)
102 check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
103
104 rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)
105
106 pkgfmt_vbox.show_all()
107
108 return pkgfmt_vbox, rootfs_combo, check_hbox
109
110 def __init__(self, title, configuration, all_image_types,
111 all_package_formats, all_distros, all_sdk_machines,
112 max_threads, parent, flags, buttons=None):
113 super(AdvancedSettingsDialog, self).__init__(title, parent, flags, buttons)
114
115 # class members from other objects
116 # bitbake settings from Builder.Configuration
117 self.configuration = configuration
118 self.image_types = all_image_types
119 self.all_package_formats = all_package_formats
120 self.all_distros = all_distros[:]
121 self.all_sdk_machines = all_sdk_machines
122 self.max_threads = max_threads
123
124 # class members for internal use
125 self.distro_combo = None
126 self.dldir_text = None
127 self.sstatedir_text = None
128 self.sstatemirror_text = None
129 self.bb_spinner = None
130 self.pmake_spinner = None
131 self.rootfs_size_spinner = None
132 self.extra_size_spinner = None
133 self.gplv3_checkbox = None
134 self.sdk_checkbox = None
135 self.image_types_checkbuttons = {}
136
137 self.md5 = self.config_md5()
138 self.settings_changed = False
139
140 # create visual elements on the dialog
141 self.save_button = None
142 self.create_visual_elements()
143 self.connect("response", self.response_cb)
144
145 def _get_sorted_value(self, var):
146 return " ".join(sorted(str(var).split())) + "\n"
147
148 def config_md5(self):
149 data = ""
150 data += ("PACKAGE_CLASSES: " + self.configuration.curr_package_format + '\n')
151 data += ("DISTRO: " + self._get_sorted_value(self.configuration.curr_distro))
152 data += ("IMAGE_ROOTFS_SIZE: " + self._get_sorted_value(self.configuration.image_rootfs_size))
153 data += ("IMAGE_EXTRA_SIZE: " + self._get_sorted_value(self.configuration.image_extra_size))
154 data += ("INCOMPATIBLE_LICENSE: " + self._get_sorted_value(self.configuration.incompat_license))
155 data += ("SDK_MACHINE: " + self._get_sorted_value(self.configuration.curr_sdk_machine))
156 data += ("TOOLCHAIN_BUILD: " + self._get_sorted_value(self.configuration.toolchain_build))
157 data += ("IMAGE_FSTYPES: " + self._get_sorted_value(self.configuration.image_fstypes))
158 return hashlib.md5(data).hexdigest()
159
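config_md5 hashes a normalised snapshot of the dialog-relevant settings; response_cb recomputes it when the dialog closes and sets settings_changed only if the digest differs. Because _get_sorted_value sorts whitespace-separated values, reordering entries does not register as a change. A tiny illustration of that normalisation:

    " ".join(sorted("iso ext3".split())) + "\n"    # -> "ext3 iso\n", same result for the input "ext3 iso"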
160 def create_visual_elements(self):
161 self.nb = gtk.Notebook()
162 self.nb.set_show_tabs(True)
163 self.nb.append_page(self.create_image_types_page(), gtk.Label("Image types"))
164 self.nb.append_page(self.create_output_page(), gtk.Label("Output"))
165 self.nb.set_current_page(0)
166 self.vbox.pack_start(self.nb, expand=True, fill=True)
167 self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)
168
169 self.show_all()
170
171 def get_num_checked_image_types(self):
172 total = 0
173 for b in self.image_types_checkbuttons.values():
174 if b.get_active():
175 total = total + 1
176 return total
177
178 def set_save_button_state(self):
179 if self.save_button:
180 self.save_button.set_sensitive(self.get_num_checked_image_types() > 0)
181
182 def image_type_checkbutton_clicked_cb(self, button):
183 self.set_save_button_state()
184 if self.get_num_checked_image_types() == 0:
185 # Show an error dialog
186 lbl = "<b>Select an image type</b>"
187 msg = "You need to select at least one image type."
188 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
189 button = dialog.add_button("OK", gtk.RESPONSE_OK)
190 HobButton.style_button(button)
191 response = dialog.run()
192 dialog.destroy()
193
194 def create_image_types_page(self):
195 main_vbox = gtk.VBox(False, 16)
196 main_vbox.set_border_width(6)
197
198 advanced_vbox = gtk.VBox(False, 6)
199 advanced_vbox.set_border_width(6)
200
201 distro_vbox = gtk.VBox(False, 6)
202 label = self.gen_label_widget("Distro:")
203 tooltip = "Selects the Yocto Project distribution you want"
204 try:
205 i = self.all_distros.index( "defaultsetup" )
206 except ValueError:
207 i = -1
208 if i != -1:
209 self.all_distros[ i ] = "Default"
210 if self.configuration.curr_distro == "defaultsetup":
211 self.configuration.curr_distro = "Default"
212 distro_widget, self.distro_combo = self.gen_combo_widget(self.configuration.curr_distro, self.all_distros,"<b>Distro</b>" + "*" + tooltip)
213 distro_vbox.pack_start(label, expand=False, fill=False)
214 distro_vbox.pack_start(distro_widget, expand=False, fill=False)
215 main_vbox.pack_start(distro_vbox, expand=False, fill=False)
216
217
218 rows = (len(self.image_types)+1)/3
219 table = gtk.Table(rows + 1, 10, True)
220 advanced_vbox.pack_start(table, expand=False, fill=False)
221
222 tooltip = "Image file system types you want."
223 info = HobInfoButton("<b>Image types</b>" + "*" + tooltip, self)
224 label = self.gen_label_widget("Image types:")
225 align = gtk.Alignment(0, 0.5, 0, 0)
226 table.attach(align, 0, 4, 0, 1)
227 align.add(label)
228 table.attach(info, 4, 5, 0, 1)
229
230 i = 1
231 j = 1
232 for image_type in sorted(self.image_types):
233 self.image_types_checkbuttons[image_type] = gtk.CheckButton(image_type)
234 self.image_types_checkbuttons[image_type].connect("toggled", self.image_type_checkbutton_clicked_cb)
235 article = ""
236 if image_type.startswith(("a", "e", "i", "o", "u")):
237 article = "n"
238 if image_type == "live":
239 self.image_types_checkbuttons[image_type].set_tooltip_text("Build iso and hddimg images")
240 else:
241 self.image_types_checkbuttons[image_type].set_tooltip_text("Build a%s %s image" % (article, image_type))
242 table.attach(self.image_types_checkbuttons[image_type], j - 1, j + 3, i, i + 1)
243 if image_type in self.configuration.image_fstypes.split():
244 self.image_types_checkbuttons[image_type].set_active(True)
245 i += 1
246 if i > rows:
247 i = 1
248 j = j + 4
249
250 main_vbox.pack_start(advanced_vbox, expand=False, fill=False)
251 self.set_save_button_state()
252
253 return main_vbox
254
255 def create_output_page(self):
256 advanced_vbox = gtk.VBox(False, 6)
257 advanced_vbox.set_border_width(6)
258
259 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Package format</span>'), expand=False, fill=False)
260 sub_vbox = gtk.VBox(False, 6)
261 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
262 tooltip_combo = "Selects the package format used to generate rootfs."
263 tooltip_extra = "Selects extra package formats to build"
264 pkgfmt_widget, self.rootfs_combo, self.check_hbox = self.gen_pkgfmt_widget(self.configuration.curr_package_format, self.all_package_formats,"<b>Root file system package format</b>" + "*" + tooltip_combo,"<b>Additional package formats</b>" + "*" + tooltip_extra)
265 sub_vbox.pack_start(pkgfmt_widget, expand=False, fill=False)
266
267 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Image size</span>'), expand=False, fill=False)
268 sub_vbox = gtk.VBox(False, 6)
269 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
270 label = self.gen_label_widget("Image basic size (in MB)")
271 tooltip = "Defines the size for the generated image. The OpenEmbedded build system determines the final size for the generated image using an algorithm that takes into account the initial disk space used for the generated image, the Image basic size value, and the Additional free space value.\n\nFor more information, check the <a href=\"http://www.yoctoproject.org/docs/current/poky-ref-manual/poky-ref-manual.html#var-IMAGE_ROOTFS_SIZE\">Yocto Project Reference Manual</a>."
272 rootfs_size_widget, self.rootfs_size_spinner = self.gen_spinner_widget(int(self.configuration.image_rootfs_size*1.0/1024), 0, 65536,"<b>Image basic size</b>" + "*" + tooltip)
273 sub_vbox.pack_start(label, expand=False, fill=False)
274 sub_vbox.pack_start(rootfs_size_widget, expand=False, fill=False)
275
276 sub_vbox = gtk.VBox(False, 6)
277 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
278 label = self.gen_label_widget("Additional free space (in MB)")
279 tooltip = "Sets extra free disk space to be added to the generated image. Use this variable when you want to ensure that a specific amount of free disk space is available on a device after an image is installed and running."
280 extra_size_widget, self.extra_size_spinner = self.gen_spinner_widget(int(self.configuration.image_extra_size*1.0/1024), 0, 65536,"<b>Additional free space</b>" + "*" + tooltip)
281 sub_vbox.pack_start(label, expand=False, fill=False)
282 sub_vbox.pack_start(extra_size_widget, expand=False, fill=False)
283
284 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Licensing</span>'), expand=False, fill=False)
285 self.gplv3_checkbox = gtk.CheckButton("Exclude GPLv3 packages")
286 self.gplv3_checkbox.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
287 if "GPLv3" in self.configuration.incompat_license.split():
288 self.gplv3_checkbox.set_active(True)
289 else:
290 self.gplv3_checkbox.set_active(False)
291 advanced_vbox.pack_start(self.gplv3_checkbox, expand=False, fill=False)
292
293 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">SDK</span>'), expand=False, fill=False)
294 sub_hbox = gtk.HBox(False, 6)
295 advanced_vbox.pack_start(sub_hbox, expand=False, fill=False)
296 self.sdk_checkbox = gtk.CheckButton("Populate SDK")
297 tooltip = "Check this box to generate an SDK tarball that consists of the cross-toolchain and a sysroot that contains development packages for your image."
298 self.sdk_checkbox.set_tooltip_text(tooltip)
299 self.sdk_checkbox.set_active(self.configuration.toolchain_build)
300 sub_hbox.pack_start(self.sdk_checkbox, expand=False, fill=False)
301
302 tooltip = "Select the host platform for which you want to run the toolchain contained in the SDK tarball."
303 sdk_machine_widget, self.sdk_machine_combo = self.gen_combo_widget(self.configuration.curr_sdk_machine, self.all_sdk_machines,"<b>Populate SDK</b>" + "*" + tooltip)
304 sub_hbox.pack_start(sdk_machine_widget, expand=False, fill=False)
305
306 return advanced_vbox
307
308 def response_cb(self, dialog, response_id):
309 package_format = []
310 package_format.append(self.rootfs_combo.get_active_text())
311 for child in self.check_hbox:
312 if isinstance(child, gtk.CheckButton) and child.get_active():
313 package_format.append(child.get_label())
314 self.configuration.curr_package_format = " ".join(package_format)
315
316 distro = self.distro_combo.get_active_text()
317 if distro == "Default":
318 distro = "defaultsetup"
319 self.configuration.curr_distro = distro
320 self.configuration.image_rootfs_size = self.rootfs_size_spinner.get_value_as_int() * 1024
321 self.configuration.image_extra_size = self.extra_size_spinner.get_value_as_int() * 1024
322
323 self.configuration.image_fstypes = ""
324 for image_type in self.image_types:
325 if self.image_types_checkbuttons[image_type].get_active():
326 self.configuration.image_fstypes += (" " + image_type)
327 self.configuration.image_fstypes = self.configuration.image_fstypes.strip()
328
329 if self.gplv3_checkbox.get_active():
330 if "GPLv3" not in self.configuration.incompat_license.split():
331 self.configuration.incompat_license += " GPLv3"
332 else:
333 if "GPLv3" in self.configuration.incompat_license.split():
334 incompat = [lic for lic in self.configuration.incompat_license.split() if lic != "GPLv3"]
335 self.configuration.incompat_license = " ".join(incompat)
336 self.configuration.incompat_license = self.configuration.incompat_license.strip()
337
338 self.configuration.toolchain_build = self.sdk_checkbox.get_active()
339 self.configuration.curr_sdk_machine = self.sdk_machine_combo.get_active_text()
340 md5 = self.config_md5()
341 self.settings_changed = (self.md5 != md5)
diff --git a/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
new file mode 100644
index 0000000000..c679f9a070
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py
@@ -0,0 +1,44 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24
25"""
26The following are convenience classes for implementing GNOME HIG compliant
27BitBake GUIs.
28In summary: spacing = 12px, border-width = 6px
29"""
30
31class CrumbsDialog(gtk.Dialog):
32 """
33 A GNOME HIG compliant dialog widget.
34 Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
35 """
36 def __init__(self, title="", parent=None, flags=0, buttons=None):
37 super(CrumbsDialog, self).__init__(title, parent, flags, buttons)
38
39 self.set_property("has-separator", False) # note: deprecated in 2.22
40
41 self.set_border_width(6)
42 self.vbox.set_property("spacing", 12)
43 self.action_area.set_property("spacing", 12)
44 self.action_area.set_property("border-width", 6)
diff --git a/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
new file mode 100644
index 0000000000..3b998e4637
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py
@@ -0,0 +1,70 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import glib
24import gtk
25from bb.ui.crumbs.hobwidget import HobIconChecker
26from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
27
28"""
29The following are convenience classes for implementing GNOME HIG compliant
30BitBake GUIs
31In summary: spacing = 12px, border-width = 6px
32"""
33
34class CrumbsMessageDialog(gtk.MessageDialog):
35 """
36 A GNOME HIG compliant dialog widget.
37 Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
38 """
39 def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
40 super(CrumbsMessageDialog, self).__init__(None,
41 gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
42 dialog_type,
43 gtk.BUTTONS_NONE,
44 None)
45
46 self.set_skip_taskbar_hint(False)
47
48 self.set_markup(label)
49
50 if 0 <= len(msg) < 300:
51 self.format_secondary_markup(msg)
52 else:
53 vbox = self.get_message_area()
54 vbox.set_border_width(1)
55 vbox.set_property("spacing", 12)
56 self.textWindow = gtk.ScrolledWindow()
57 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
58 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
59 self.msgView = gtk.TextView()
60 self.msgView.set_editable(False)
61 self.msgView.set_wrap_mode(gtk.WRAP_WORD)
62 self.msgView.set_cursor_visible(False)
63 self.msgView.set_size_request(300, 300)
64 self.buf = gtk.TextBuffer()
65 self.buf.set_text(msg)
66 self.msgView.set_buffer(self.buf)
67 self.textWindow.add(self.msgView)
68 self.msgView.show()
69 vbox.add(self.textWindow)
70 self.textWindow.show()
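
CrumbsMessageDialog keeps a short secondary message in format_secondary_markup() and switches to a scrolled, read-only TextView once the message reaches 300 characters, so long error output does not grow the window. A small usage sketch (the markup and button label are illustrative):

import gtk
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog

msg = "Short text goes to format_secondary_markup; 300+ characters get a scrolled TextView."
dialog = CrumbsMessageDialog(None, "<b>Build finished</b>", gtk.MESSAGE_INFO, msg)
dialog.add_button("Close", gtk.RESPONSE_OK)
dialog.run()
dialog.destroy()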
diff --git a/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
new file mode 100644
index 0000000000..a13fff906a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py
@@ -0,0 +1,219 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import glob
24import gtk
25import gobject
26import os
27import re
28import shlex
29import subprocess
30import tempfile
31from bb.ui.crumbs.hobwidget import hic, HobButton
32from bb.ui.crumbs.progressbar import HobProgressBar
33import bb.ui.crumbs.utils
34import bb.process
35from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
36from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
37
38"""
39The following are convenience classes for implementing GNOME HIG compliant
40BitBake GUIs
41In summary: spacing = 12px, border-width = 6px
42"""
43
44class DeployImageDialog (CrumbsDialog):
45
46 __dummy_usb__ = "--select a usb drive--"
47
48 def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
49 super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
50
51 self.image_path = image_path
52 self.standalone = standalone
53
54 self.create_visual_elements()
55 self.connect("response", self.response_cb)
56
57 def create_visual_elements(self):
58 self.set_size_request(600, 400)
59 label = gtk.Label()
60 label.set_alignment(0.0, 0.5)
61        markup = "<span font_desc='12'>The image to be written to the USB drive:</span>"
62 label.set_markup(markup)
63 self.vbox.pack_start(label, expand=False, fill=False, padding=2)
64
65 table = gtk.Table(2, 10, False)
66 table.set_col_spacings(5)
67 table.set_row_spacings(5)
68 self.vbox.pack_start(table, expand=True, fill=True)
69
70 scroll = gtk.ScrolledWindow()
71 scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
72 scroll.set_shadow_type(gtk.SHADOW_IN)
73 tv = gtk.TextView()
74 tv.set_editable(False)
75 tv.set_wrap_mode(gtk.WRAP_WORD)
76 tv.set_cursor_visible(False)
77 self.buf = gtk.TextBuffer()
78 self.buf.set_text(self.image_path)
79 tv.set_buffer(self.buf)
80 scroll.add(tv)
81 table.attach(scroll, 0, 10, 0, 1)
82
83        # There are two ways to use DeployImageDialog:
84        # one is that it is opened by Hob when the 'Deploy Image' button is clicked,
85        # the other is that it is opened by a standalone script.
86        # The following block handles the latter case: it adds a 'Select Image' button
87        # and emits a signal when that button is clicked.
88 if self.standalone:
89 gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
90 gobject.TYPE_NONE, ())
91 icon = gtk.Image()
92 pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
93 icon.set_from_pixbuf(pix_buffer)
94 button = gtk.Button("Select Image")
95 button.set_image(icon)
96 #button.set_size_request(140, 50)
97 table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
98 button.connect("clicked", self.select_image_button_clicked_cb)
99
100 separator = gtk.HSeparator()
101 self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
102
103 self.usb_desc = gtk.Label()
104 self.usb_desc.set_alignment(0.0, 0.5)
105 markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
106 self.usb_desc.set_markup(markup)
107
108 self.usb_combo = gtk.combo_box_new_text()
109 self.usb_combo.connect("changed", self.usb_combo_changed_cb)
110 model = self.usb_combo.get_model()
111 model.clear()
112 self.usb_combo.append_text(self.__dummy_usb__)
113 for usb in self.find_all_usb_devices():
114 self.usb_combo.append_text("/dev/" + usb)
115 self.usb_combo.set_active(0)
116 self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
117 self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
118
119 self.progress_bar = HobProgressBar()
120 self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
121 separator = gtk.HSeparator()
122 self.vbox.pack_start(separator, expand=False, fill=True, padding=10)
123
124 self.vbox.show_all()
125 self.progress_bar.hide()
126
127 def set_image_text_buffer(self, image_path):
128 self.buf.set_text(image_path)
129
130 def set_image_path(self, image_path):
131 self.image_path = image_path
132
133 def popen_read(self, cmd):
134 tmpout, errors = bb.process.run("%s" % cmd)
135 return tmpout.strip()
136
137 def find_all_usb_devices(self):
138 usb_devs = [ os.readlink(u)
139 for u in glob.glob('/dev/disk/by-id/usb*')
140 if not re.search(r'part\d+', u) ]
141 return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
142
143 def get_usb_info(self, dev):
144 return "%s %s" % \
145 (self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
146 self.popen_read('cat /sys/class/block/%s/device/model' % dev))
147
148 def select_image_button_clicked_cb(self, button):
149 self.emit('select_image_clicked')
150
151 def usb_combo_changed_cb(self, usb_combo):
152 combo_item = self.usb_combo.get_active_text()
153 if not combo_item or combo_item == self.__dummy_usb__:
154 markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
155 self.usb_desc.set_markup(markup)
156 else:
157            markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.replace("/dev/", "", 1)) + "</span>"
158 self.usb_desc.set_markup(markup)
159
160 def response_cb(self, dialog, response_id):
161 if response_id == gtk.RESPONSE_YES:
162 lbl = ''
163 msg = ''
164 combo_item = self.usb_combo.get_active_text()
165 if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
166 cmdline = bb.ui.crumbs.utils.which_terminal()
167 if cmdline:
168 tmpfile = tempfile.NamedTemporaryFile()
169 cmdline += "\"sudo dd if=" + self.image_path + \
170 " of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
171 subprocess.call(shlex.split(cmdline))
172
173 if int(tmpfile.readline().strip()) == 0:
174                            lbl = "<b>Image deployed successfully.</b>"
175 else:
176 lbl = "<b>Failed to deploy image.</b>"
177 msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
178 tmpfile.close()
179 else:
180 if not self.image_path:
181 lbl = "<b>No selection made.</b>"
182 msg = "You have not selected an image to deploy."
183 else:
184 lbl = "<b>No selection made.</b>"
185 msg = "You have not selected a USB device."
186 if len(lbl):
187 crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
188 button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
189 HobButton.style_button(button)
190 crumbs_dialog.run()
191 crumbs_dialog.destroy()
192
193 def update_progress_bar(self, title, fraction, status=None):
194 self.progress_bar.update(fraction)
195 self.progress_bar.set_title(title)
196 self.progress_bar.set_rcstyle(status)
197
198 def write_file(self, ifile, ofile):
199 self.progress_bar.reset()
200 self.progress_bar.show()
201
202 f_from = os.open(ifile, os.O_RDONLY)
203 f_to = os.open(ofile, os.O_WRONLY)
204
205 total_size = os.stat(ifile).st_size
206 written_size = 0
207
208 while True:
209 buf = os.read(f_from, 1024*1024)
210 if not buf:
211 break
212 os.write(f_to, buf)
213 written_size += 1024*1024
214 self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)
215
216 self.update_progress_bar("Writing completed:", 1.0)
217 os.close(f_from)
218 os.close(f_to)
219 self.progress_bar.hide()
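
find_all_usb_devices() relies on the /dev/disk/by-id/usb* symlinks maintained by udev and drops the partN entries so only whole devices end up in the combo box. The same discovery step as a standalone helper, a sketch that assumes those udev links exist on the host:

import glob
import os
import re

def whole_usb_disks():
    """Return device names (e.g. 'sdb') for attached USB disks, ignoring partitions."""
    devices = []
    for link in glob.glob('/dev/disk/by-id/usb*'):
        if re.search(r'part\d+', link):
            continue                      # skip usb-...-part1 style partition links
        target = os.readlink(link)        # e.g. '../../sdb'
        devices.append(os.path.basename(target))
    return sorted(set(devices))

if __name__ == '__main__':
    print(whole_usb_disks())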
diff --git a/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py b/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
new file mode 100644
index 0000000000..21216adc97
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py
@@ -0,0 +1,172 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import os
26from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
27from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
28from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
29
30"""
31The following are convenience classes for implementing GNOME HIG compliant
32BitBake GUIs
33In summary: spacing = 12px, border-width = 6px
34"""
35
36class ImageSelectionDialog (CrumbsDialog):
37
38 __columns__ = [{
39 'col_name' : 'Image name',
40 'col_id' : 0,
41 'col_style': 'text',
42 'col_min' : 400,
43 'col_max' : 400
44 }, {
45 'col_name' : 'Select',
46 'col_id' : 1,
47 'col_style': 'radio toggle',
48 'col_min' : 160,
49 'col_max' : 160
50 }]
51
52
53 def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
54 super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
55 self.connect("response", self.response_cb)
56
57 self.image_folder = image_folder
58 self.image_types = image_types
59 self.image_list = []
60 self.image_names = []
61 self.image_extension = image_extension
62
63 # create visual elements on the dialog
64 self.create_visual_elements()
65
66 self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
67 self.fill_image_store()
68
69 def create_visual_elements(self):
70 hbox = gtk.HBox(False, 6)
71
72 self.vbox.pack_start(hbox, expand=False, fill=False)
73
74 entry = gtk.Entry()
75 entry.set_text(self.image_folder)
76 table = gtk.Table(1, 10, True)
77 table.set_size_request(560, -1)
78 hbox.pack_start(table, expand=False, fill=False)
79 table.attach(entry, 0, 9, 0, 1)
80 image = gtk.Image()
81 image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
82 open_button = gtk.Button()
83 open_button.set_image(image)
84 open_button.connect("clicked", self.select_path_cb, self, entry)
85 table.attach(open_button, 9, 10, 0, 1)
86
87 self.image_table = HobViewTable(self.__columns__, "Images")
88 self.image_table.set_size_request(-1, 300)
89 self.image_table.connect("toggled", self.toggled_cb)
90 self.image_table.connect_group_selection(self.table_selected_cb)
91 self.image_table.connect("row-activated", self.row_actived_cb)
92 self.vbox.pack_start(self.image_table, expand=True, fill=True)
93
94 self.show_all()
95
96 def change_image_cb(self, model, path, columnid):
97 if not model:
98 return
99 iter = model.get_iter_first()
100 while iter:
101 rowpath = model.get_path(iter)
102 model[rowpath][columnid] = False
103 iter = model.iter_next(iter)
104
105 model[path][columnid] = True
106
107 def toggled_cb(self, table, cell, path, columnid, tree):
108 model = tree.get_model()
109 self.change_image_cb(model, path, columnid)
110
111 def table_selected_cb(self, selection):
112 model, paths = selection.get_selected_rows()
113 if paths:
114 self.change_image_cb(model, paths[0], 1)
115
116 def row_actived_cb(self, tab, model, path):
117 self.change_image_cb(model, path, 1)
118 self.emit('response', gtk.RESPONSE_YES)
119
120 def select_path_cb(self, action, parent, entry):
121 dialog = gtk.FileChooserDialog("", parent,
122 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
123 text = entry.get_text()
124 dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
125 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
126 HobAltButton.style_button(button)
127 button = dialog.add_button("Open", gtk.RESPONSE_YES)
128 HobButton.style_button(button)
129 response = dialog.run()
130 if response == gtk.RESPONSE_YES:
131 path = dialog.get_filename()
132 entry.set_text(path)
133 self.image_folder = path
134 self.fill_image_store()
135
136 dialog.destroy()
137
138 def fill_image_store(self):
139 self.image_list = []
140 self.image_store.clear()
141 imageset = set()
142 for root, dirs, files in os.walk(self.image_folder):
143 # ignore the sub directories
144 dirs[:] = []
145 for f in files:
146 for image_type in self.image_types:
147 if image_type in self.image_extension:
148 real_types = self.image_extension[image_type]
149 else:
150 real_types = [image_type]
151 for real_image_type in real_types:
152 if f.endswith('.' + real_image_type):
153 imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
154 self.image_list.append(f)
155
156 for image in imageset:
157 self.image_store.set(self.image_store.append(), 0, image, 1, False)
158
159 self.image_table.set_model(self.image_store)
160
161 def response_cb(self, dialog, response_id):
162 self.image_names = []
163 if response_id == gtk.RESPONSE_YES:
164 iter = self.image_store.get_iter_first()
165 while iter:
166 path = self.image_store.get_path(iter)
167 if self.image_store[path][1]:
168 for f in self.image_list:
169 if f.startswith(self.image_store[path][0] + '.'):
170 self.image_names.append(f)
171 break
172 iter = self.image_store.iter_next(iter)
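
fill_image_store() scans only the top level of the chosen folder, maps each configured image type to its real file extensions via image_extension, and collapses names such as <image>.rootfs.ext3 to one entry per image. The grouping logic on its own, as a hedged sketch (the folder and type names in the comment are examples):

import os

def group_images(image_folder, image_types, image_extension=None):
    """Return (image_basenames, matching_files) for the images found in image_folder."""
    image_extension = image_extension or {}
    image_list = []
    imageset = set()
    for f in os.listdir(image_folder):              # top level only, like the dialog
        for image_type in image_types:
            for real_type in image_extension.get(image_type, [image_type]):
                if f.endswith('.' + real_type):
                    base = f.rsplit('.' + real_type)[0].rsplit('.rootfs')[0]
                    imageset.add(base)
                    image_list.append(f)
    return sorted(imageset), image_list

# e.g. group_images('tmp/deploy/images/qemux86', ['ext3', 'tar.bz2'],
#                   {'live': ['hddimg', 'iso']})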
diff --git a/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py b/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
new file mode 100644
index 0000000000..52d57b6738
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
@@ -0,0 +1,298 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import os
26import tempfile
27from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
28from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30
31"""
32The following are convenience classes for implementing GNOME HIG compliant
33BitBake GUIs
34In summary: spacing = 12px, border-width = 6px
35"""
36
37class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
38 """
39 A custom CellRenderer implementation which is activatable
40 so that we can handle user clicks
41 """
42 __gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 (gobject.TYPE_STRING,)), }
45
46 def __init__(self):
47 gtk.CellRendererPixbuf.__init__(self)
48 self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
49 self.set_property('follow-state', True)
50
51 """
52 Respond to a user click on a cell
53 """
54    def do_activate(self, event, widget, path, background_area, cell_area, flags):
55 self.emit('clicked', path)
56
57#
58# LayerSelectionDialog
59#
60class LayerSelectionDialog (CrumbsDialog):
61
62 TARGETS = [
63 ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
64 ("text/plain", 0, 1),
65 ("TEXT", 0, 2),
66 ("STRING", 0, 3),
67 ]
68
69 def gen_label_widget(self, content):
70 label = gtk.Label()
71 label.set_alignment(0, 0)
72 label.set_markup(content)
73 label.show()
74 return label
75
76 def layer_widget_toggled_cb(self, cell, path, layer_store):
77 name = layer_store[path][0]
78 toggle = not layer_store[path][1]
79 layer_store[path][1] = toggle
80
81 def layer_widget_add_clicked_cb(self, action, layer_store, parent):
82 dialog = gtk.FileChooserDialog("Add new layer", parent,
83 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
84 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
85 HobAltButton.style_button(button)
86 button = dialog.add_button("Open", gtk.RESPONSE_YES)
87 HobButton.style_button(button)
88 label = gtk.Label("Select the layer you wish to add")
89 label.show()
90 dialog.set_extra_widget(label)
91 response = dialog.run()
92 path = dialog.get_filename()
93 dialog.destroy()
94
95 lbl = "<b>Error</b>"
96 msg = "Unable to load layer <i>%s</i> because " % path
97 if response == gtk.RESPONSE_YES:
98 import os
99 import os.path
100 layers = []
101 it = layer_store.get_iter_first()
102 while it:
103 layers.append(layer_store.get_value(it, 0))
104 it = layer_store.iter_next(it)
105
106 if not path:
107 msg += "it is an invalid path."
108 elif not os.path.exists(path+"/conf/layer.conf"):
109 msg += "there is no layer.conf inside the directory."
110 elif path in layers:
111 msg += "it is already in loaded layers."
112 else:
113 layer_store.append([path])
114 return
115 dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
116 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
117 response = dialog.run()
118 dialog.destroy()
119
120 def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
121 model, iter = tree_selection.get_selected()
122 if iter:
123 layer_store.remove(iter)
124
125
126 def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
127 hbox = gtk.HBox(False, 6)
128
129 layer_tv = gtk.TreeView()
130 layer_tv.set_rules_hint(True)
131 layer_tv.set_headers_visible(False)
132 tree_selection = layer_tv.get_selection()
133 tree_selection.set_mode(gtk.SELECTION_SINGLE)
134
135        # Enable drag and drop of rows, including row moves
136 dnd_internal_target = ''
137 dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
138 layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
139 dnd_targets,
140 gtk.gdk.ACTION_MOVE)
141 layer_tv.enable_model_drag_dest(dnd_targets,
142 gtk.gdk.ACTION_MOVE)
143 layer_tv.connect("drag_data_get", self.drag_data_get_cb)
144 layer_tv.connect("drag_data_received", self.drag_data_received_cb)
145
146 col0= gtk.TreeViewColumn('Path')
147 cell0 = gtk.CellRendererText()
148 cell0.set_padding(5,2)
149 col0.pack_start(cell0, True)
150 col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
151 layer_tv.append_column(col0)
152
153 scroll = gtk.ScrolledWindow()
154 scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
155 scroll.set_shadow_type(gtk.SHADOW_IN)
156 scroll.add(layer_tv)
157
158 table_layer = gtk.Table(2, 10, False)
159 hbox.pack_start(table_layer, expand=True, fill=True)
160
161 table_layer.attach(scroll, 0, 10, 0, 1)
162
163 layer_store = gtk.ListStore(gobject.TYPE_STRING)
164 for layer in layers:
165 layer_store.append([layer])
166
167 col1 = gtk.TreeViewColumn('Enabled')
168 layer_tv.append_column(col1)
169
170 cell1 = CellRendererPixbufActivatable()
171 cell1.set_fixed_size(-1,35)
172 cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
173 col1.pack_start(cell1, True)
174 col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
175
176 add_button = gtk.Button()
177 add_button.set_relief(gtk.RELIEF_NONE)
178 box = gtk.HBox(False, 6)
179 box.show()
180 add_button.add(box)
181 add_button.connect("enter-notify-event", self.add_hover_cb)
182 add_button.connect("leave-notify-event", self.add_leave_cb)
183 self.im = gtk.Image()
184 self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
185 self.im.show()
186 box.pack_start(self.im, expand=False, fill=False, padding=6)
187 lbl = gtk.Label("Add layer")
188 lbl.set_alignment(0.0, 0.5)
189 lbl.show()
190 box.pack_start(lbl, expand=True, fill=True, padding=6)
191 add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
192 table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
193 layer_tv.set_model(layer_store)
194
195 hbox.show_all()
196
197 return hbox, layer_store
198
199 def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
200 treeselection = treeview.get_selection()
201 model, iter = treeselection.get_selected()
202 data = model.get_value(iter, 0)
203 selection.set(selection.target, 8, data)
204
205 def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
206 model = treeview.get_model()
207 data = selection.data
208 drop_info = treeview.get_dest_row_at_pos(x, y)
209 if drop_info:
210 path, position = drop_info
211 iter = model.get_iter(path)
212 if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
213 model.insert_before(iter, [data])
214 else:
215 model.insert_after(iter, [data])
216 else:
217 model.append([data])
218 if context.action == gtk.gdk.ACTION_MOVE:
219 context.finish(True, True, etime)
220 return
221
222 def add_hover_cb(self, button, event):
223 self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
224
225 def add_leave_cb(self, button, event):
226 self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
227
228 def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
229 super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
230
231 # class members from other objects
232 self.layers = layers
233 self.layers_non_removable = layers_non_removable
234 self.all_layers = all_layers
235 self.layers_changed = False
236
237 # icon for remove button in TreeView
238 im = gtk.Image()
239 im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
240 self.rem_icon = im.get_pixbuf()
241
242 # class members for internal use
243 self.layer_store = None
244
245 # create visual elements on the dialog
246 self.create_visual_elements()
247 self.connect("response", self.response_cb)
248
249 def create_visual_elements(self):
250 layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
251 layer_widget.set_size_request(450, 250)
252 self.vbox.pack_start(layer_widget, expand=True, fill=True)
253 self.show_all()
254
255 def response_cb(self, dialog, response_id):
256 model = self.layer_store
257 it = model.get_iter_first()
258 layers = []
259 while it:
260 layers.append(model.get_value(it, 0))
261 it = model.iter_next(it)
262
263 self.layers_changed = (self.layers != layers)
264 self.layers = layers
265
266 """
267 A custom cell_data_func to draw a delete 'button' in the TreeView for layers
268    other than the meta layer, whose deletion is prevented so that the
269 user can't shoot themselves in the foot too badly.
270 """
271 def draw_delete_button_cb(self, col, cell, model, it, tv):
272 path = model.get_value(it, 0)
273 if path in self.layers_non_removable:
274 cell.set_sensitive(False)
275 cell.set_property('pixbuf', None)
276 cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
277 else:
278 cell.set_property('pixbuf', self.rem_icon)
279 cell.set_sensitive(True)
280 cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
281
282 return True
283
284 """
285 A custom cell_data_func to write an extra message into the layer path cell
286 for the meta layer. We should inform the user that they can't remove it for
287 their own safety.
288 """
289 def draw_layer_path_cb(self, col, cell, model, it):
290 path = model.get_value(it, 0)
291 if path in self.layers_non_removable:
292 cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
293 else:
294 cell.set_property('text', path)
295
296 def del_cell_clicked_cb(self, cell, path, model):
297 it = model.get_iter_from_string(path)
298 model.remove(it)
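
layer_widget_add_clicked_cb() only appends a directory if it exists, carries a conf/layer.conf and is not already in the list; otherwise the accumulated message tells the user which check failed. The validation alone, as a sketch (the function name and return convention are illustrative):

import os

def validate_layer_path(path, loaded_layers):
    """Return None if the path looks like an addable layer, otherwise an error string."""
    if not path:
        return "it is an invalid path."
    if not os.path.exists(os.path.join(path, "conf", "layer.conf")):
        return "there is no layer.conf inside the directory."
    if path in loaded_layers:
        return "it is already in loaded layers."
    return None

error = validate_layer_path("/home/user/poky/meta-example", ["/home/user/poky/meta"])
if error:
    print("Unable to load layer because " + error)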
diff --git a/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py
new file mode 100644
index 0000000000..33bac39db8
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py
@@ -0,0 +1,163 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23from bb.ui.crumbs.hobwidget import HobAltButton
24from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
25
26"""
27The following are convenience classes for implementing GNOME HIG compliant
28BitBake GUIs
29In summary: spacing = 12px, border-width = 6px
30"""
31
32#
33# ParsingWarningsDialog
34#
35class ParsingWarningsDialog (CrumbsDialog):
36
37 def __init__(self, title, warnings, parent, flags, buttons=None):
38 super(ParsingWarningsDialog, self).__init__(title, parent, flags, buttons)
39
40 self.warnings = warnings
41 self.warning_on = 0
42 self.warn_nb = len(warnings)
43
44 # create visual elements on the dialog
45 self.create_visual_elements()
46
47 def cancel_button_cb(self, button):
48 self.destroy()
49
50 def previous_button_cb(self, button):
51 self.warning_on = self.warning_on - 1
52 self.refresh_components()
53
54 def next_button_cb(self, button):
55 self.warning_on = self.warning_on + 1
56 self.refresh_components()
57
58 def refresh_components(self):
59 lbl = self.warnings[self.warning_on]
60        # When the warning text has 400 or more chars, use a scroll bar
61 if 0<= len(lbl) < 400:
62 self.warning_label.set_size_request(320, 230)
63 self.warning_label.set_use_markup(True)
64 self.warning_label.set_line_wrap(True)
65 self.warning_label.set_markup(lbl)
66 self.warning_label.set_property("yalign", 0.00)
67 else:
68 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
69 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
70 self.msgView = gtk.TextView()
71 self.msgView.set_editable(False)
72 self.msgView.set_wrap_mode(gtk.WRAP_WORD)
73 self.msgView.set_cursor_visible(False)
74 self.msgView.set_size_request(320, 230)
75 self.buf = gtk.TextBuffer()
76 self.buf.set_text(lbl)
77 self.msgView.set_buffer(self.buf)
78 self.textWindow.add(self.msgView)
79 self.msgView.show()
80
81 if self.warning_on==0:
82 self.previous_button.set_sensitive(False)
83 else:
84 self.previous_button.set_sensitive(True)
85
86 if self.warning_on==self.warn_nb-1:
87 self.next_button.set_sensitive(False)
88 else:
89 self.next_button.set_sensitive(True)
90
91 if self.warn_nb>1:
92 self.heading = "Warning " + str(self.warning_on + 1) + " of " + str(self.warn_nb)
93 self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
94 else:
95 self.heading = "Warning"
96 self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
97
98 self.show_all()
99
100 if 0<= len(lbl) < 400:
101 self.textWindow.hide()
102 else:
103 self.warning_label.hide()
104
105 def create_visual_elements(self):
106 self.set_size_request(350, 350)
107 self.heading_label = gtk.Label()
108 self.heading_label.set_alignment(0, 0)
109 self.warning_label = gtk.Label()
110 self.warning_label.set_selectable(True)
111 self.warning_label.set_alignment(0, 0)
112 self.textWindow = gtk.ScrolledWindow()
113
114 table = gtk.Table(1, 10, False)
115
116 cancel_button = gtk.Button()
117 cancel_button.set_label("Close")
118 cancel_button.connect("clicked", self.cancel_button_cb)
119 cancel_button.set_size_request(110, 30)
120
121 self.previous_button = gtk.Button()
122 image1 = gtk.image_new_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_BUTTON)
123 image1.show()
124 box = gtk.HBox(False, 6)
125 box.show()
126 self.previous_button.add(box)
127 lbl = gtk.Label("Previous")
128 lbl.show()
129 box.pack_start(image1, expand=False, fill=False, padding=3)
130 box.pack_start(lbl, expand=True, fill=True, padding=3)
131 self.previous_button.connect("clicked", self.previous_button_cb)
132 self.previous_button.set_size_request(110, 30)
133
134 self.next_button = gtk.Button()
135 image2 = gtk.image_new_from_stock(gtk.STOCK_GO_FORWARD, gtk.ICON_SIZE_BUTTON)
136 image2.show()
137 box = gtk.HBox(False, 6)
138 box.show()
139 self.next_button.add(box)
140 lbl = gtk.Label("Next")
141 lbl.show()
142 box.pack_start(lbl, expand=True, fill=True, padding=3)
143 box.pack_start(image2, expand=False, fill=False, padding=3)
144 self.next_button.connect("clicked", self.next_button_cb)
145 self.next_button.set_size_request(110, 30)
146
147        # When there is more than one warning, we need "Previous" and "Next" buttons
148 if self.warn_nb>1:
149 self.vbox.pack_start(self.heading_label, expand=False, fill=False)
150 self.vbox.pack_start(self.warning_label, expand=False, fill=False)
151 self.vbox.pack_start(self.textWindow, expand=False, fill=False)
152 table.attach(cancel_button, 6, 7, 0, 1, xoptions=gtk.SHRINK)
153 table.attach(self.previous_button, 7, 8, 0, 1, xoptions=gtk.SHRINK)
154 table.attach(self.next_button, 8, 9, 0, 1, xoptions=gtk.SHRINK)
155 self.vbox.pack_end(table, expand=False, fill=False)
156 else:
157 self.vbox.pack_start(self.heading_label, expand=False, fill=False)
158 self.vbox.pack_start(self.warning_label, expand=False, fill=False)
159 self.vbox.pack_start(self.textWindow, expand=False, fill=False)
160 cancel_button = self.add_button("Close", gtk.RESPONSE_CANCEL)
161 HobAltButton.style_button(cancel_button)
162
163 self.refresh_components()
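
refresh_components() is driven by a single index, warning_on: 'Previous' is disabled at 0, 'Next' is disabled at warn_nb - 1, and the heading only counts warnings when there is more than one. The navigation state in isolation, as a small sketch:

def warning_nav_state(warning_on, warn_nb):
    """Return (heading, previous_enabled, next_enabled) for the warnings dialog."""
    previous_enabled = warning_on > 0
    next_enabled = warning_on < warn_nb - 1
    if warn_nb > 1:
        heading = "Warning %d of %d" % (warning_on + 1, warn_nb)
    else:
        heading = "Warning"
    return heading, previous_enabled, next_enabled

# e.g. warning_nav_state(0, 3) -> ('Warning 1 of 3', False, True)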
diff --git a/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py b/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
new file mode 100644
index 0000000000..09b9ce6de3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
@@ -0,0 +1,437 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2013 Intel Corporation
5#
6# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import string
22import gtk
23import gobject
24import os
25import tempfile
26import glib
27from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
28from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
29from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
30from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
31
32"""
33The following are convenience classes for implementing GNOME HIG compliant
34BitBake GUIs
35In summary: spacing = 12px, border-width = 6px
36"""
37
38class PropertyDialog(CrumbsDialog):
39
40 def __init__(self, title, parent, information, flags, buttons=None):
41
42 super(PropertyDialog, self).__init__(title, parent, flags, buttons)
43
44 self.properties = information
45
46 if len(self.properties) == 10:
47 self.create_recipe_visual_elements()
48 elif len(self.properties) == 5:
49 self.create_package_visual_elements()
50 else:
51 self.create_information_visual_elements()
52
53
54 def create_information_visual_elements(self):
55
56 HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
57 ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
58
59 self.set_resizable(False)
60
61 self.table = gtk.Table(1,1,False)
62 self.table.set_row_spacings(0)
63 self.table.set_col_spacings(0)
64
65 self.image = gtk.Image()
66 self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
67 self.image.set_property("xalign",0)
68 #self.vbox.add(self.image)
69
70 image_info = self.properties.split("*")[0]
71 info = self.properties.split("*")[1]
72
73 vbox = gtk.VBox(True, spacing=30)
74
75 self.label_short = gtk.Label()
76 self.label_short.set_line_wrap(False)
77 self.label_short.set_markup(image_info)
78 self.label_short.set_property("xalign", 0)
79
80 self.info_label = gtk.Label()
81 self.info_label.set_line_wrap(True)
82 self.info_label.set_markup(info)
83 self.info_label.set_property("yalign", 0.5)
84
85 self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
86 self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
87 self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
88
89 self.vbox.add(self.table)
90 self.connect('delete-event', lambda w, e: self.destroy() or True)
91
92 def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
93 try:
94 (path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
95 it = widget.get_model().get_iter(path)
96 value = widget.get_model().get_value(it,cell)
97 if value in self.tooltip_items:
98 tooltips.set_tip(widget, self.tooltip_items[value])
99 tooltips.enable()
100 else:
101 tooltips.set_tip(widget, emptyText)
102 except:
103 tooltips.set_tip(widget, emptyText)
104
105
106 def create_package_visual_elements(self):
107
108 import json
109
110 name = self.properties['name']
111 binb = self.properties['binb']
112 size = self.properties['size']
113 recipe = self.properties['recipe']
114 file_list = json.loads(self.properties['files_list'])
115
116 files_temp = ''
117 paths_temp = ''
118 files_binb = []
119 paths_binb = []
120
121 self.tooltip_items = {}
122
123 self.set_resizable(False)
124
125 #cleaning out the recipe variable
126 recipe = recipe.split("+")[0]
127
128 vbox = gtk.VBox(True,spacing = 0)
129
130 ###################################### NAME ROW + COL #################################
131
132 self.label_short = gtk.Label()
133 self.label_short.set_size_request(300,-1)
134 self.label_short.set_selectable(True)
135 self.label_short.set_line_wrap(True)
136 self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
137 self.label_short.set_property("xalign", 0)
138
139 self.vbox.add(self.label_short)
140
141 ###################################### SIZE ROW + COL ######################################
142
143 self.label_short = gtk.Label()
144 self.label_short.set_size_request(300,-1)
145 self.label_short.set_selectable(True)
146 self.label_short.set_line_wrap(True)
147 self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
148 self.label_short.set_property("xalign", 0)
149
150 self.vbox.add(self.label_short)
151
152 ##################################### RECIPE ROW + COL #########################################
153
154 self.label_short = gtk.Label()
155 self.label_short.set_size_request(300,-1)
156 self.label_short.set_selectable(True)
157 self.label_short.set_line_wrap(True)
158 self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
159 self.label_short.set_property("xalign", 0)
160
161 self.vbox.add(self.label_short)
162
163 ##################################### BINB ROW + COL #######################################
164
165 if binb != '':
166 self.label_short = gtk.Label()
167 self.label_short.set_selectable(True)
168 self.label_short.set_line_wrap(True)
169 self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
170 self.label_short.set_property("xalign", 0)
171
172 self.label_info = gtk.Label()
173 self.label_info.set_size_request(300,-1)
174 self.label_info.set_selectable(True)
175 self.label_info.set_line_wrap(True)
176 self.label_info.set_markup(binb)
177 self.label_info.set_property("xalign", 0)
178
179 self.vbox.add(self.label_short)
180 self.vbox.add(self.label_info)
181
182 #################################### FILES BROUGHT BY PACKAGES ###################################
183
184 if file_list:
185
186 self.textWindow = gtk.ScrolledWindow()
187 self.textWindow.set_shadow_type(gtk.SHADOW_IN)
188 self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
189 self.textWindow.set_size_request(100, 170)
190
191 packagefiles_store = gtk.ListStore(str)
192
193 self.packagefiles_tv = gtk.TreeView()
194 self.packagefiles_tv.set_rules_hint(True)
195 self.packagefiles_tv.set_headers_visible(True)
196 self.textWindow.add(self.packagefiles_tv)
197
198 self.cell1 = gtk.CellRendererText()
199 col1 = gtk.TreeViewColumn('Package files', self.cell1)
200 col1.set_cell_data_func(self.cell1, self.regex_field)
201 self.packagefiles_tv.append_column(col1)
202
203 items = file_list.keys()
204 items.sort()
205 for item in items:
206 fullpath = item
207 while len(item) > 35:
208 item = item[:len(item)/2] + "" + item[len(item)/2+1:]
209 if len(item) == 35:
210 item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
211 self.tooltip_items[item] = fullpath
212
213 packagefiles_store.append([str(item)])
214
215 self.packagefiles_tv.set_model(packagefiles_store)
216
217 tips = gtk.Tooltips()
218 tips.set_tip(self.packagefiles_tv, "")
219 self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
220 self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
221
222 self.vbox.add(self.textWindow)
223
224 self.vbox.show_all()
225
226
227 def regex_field(self, column, cell, model, iter):
228 cell.set_property('text', model.get_value(iter, 0))
229 return
230
231
232 def create_recipe_visual_elements(self):
233
234 summary = self.properties['summary']
235 name = self.properties['name']
236 version = self.properties['version']
237 revision = self.properties['revision']
238 binb = self.properties['binb']
239 group = self.properties['group']
240 license = self.properties['license']
241 homepage = self.properties['homepage']
242 bugtracker = self.properties['bugtracker']
243 description = self.properties['description']
244
245 self.set_resizable(False)
246
247 #cleaning out the version variable and also the summary
248 version = version.split(":")[1]
249 if len(version) > 30:
250 version = version.split("+")[0]
251 else:
252 version = version.split("-")[0]
253 license = license.replace("&" , "and")
254 if (homepage == ''):
255 homepage = 'unknown'
256 if (bugtracker == ''):
257 bugtracker = 'unknown'
258 summary = summary.split("+")[0]
259
260 #calculating the rows needed for the table
261 binb_items_count = len(binb.split(','))
262 binb_items = binb.split(',')
263
264 vbox = gtk.VBox(False,spacing = 0)
265
266 ######################################## SUMMARY LABEL #########################################
267
268 if summary != '':
269 self.label_short = gtk.Label()
270 self.label_short.set_width_chars(37)
271 self.label_short.set_selectable(True)
272 self.label_short.set_line_wrap(True)
273 self.label_short.set_markup("<b>" + summary + "</b>")
274 self.label_short.set_property("xalign", 0)
275
276 self.vbox.add(self.label_short)
277
278 ########################################## NAME ROW + COL #######################################
279
280 self.label_short = gtk.Label()
281 self.label_short.set_selectable(True)
282 self.label_short.set_line_wrap(True)
283 self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
284 self.label_short.set_property("xalign", 0)
285
286 self.vbox.add(self.label_short)
287
288 ####################################### VERSION ROW + COL ####################################
289
290 self.label_short = gtk.Label()
291 self.label_short.set_selectable(True)
292 self.label_short.set_line_wrap(True)
293 self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
294 self.label_short.set_property("xalign", 0)
295
296 self.vbox.add(self.label_short)
297
298 ##################################### REVISION ROW + COL #####################################
299
300 self.label_short = gtk.Label()
301 self.label_short.set_line_wrap(True)
302 self.label_short.set_selectable(True)
303 self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
304 self.label_short.set_property("xalign", 0)
305
306 self.vbox.add(self.label_short)
307
308 ################################## GROUP ROW + COL ############################################
309
310 self.label_short = gtk.Label()
311 self.label_short.set_selectable(True)
312 self.label_short.set_line_wrap(True)
313 self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
314 self.label_short.set_property("xalign", 0)
315
316 self.vbox.add(self.label_short)
317
318 ################################# HOMEPAGE ROW + COL ############################################
319
320 if homepage != 'unknown':
321 self.label_info = gtk.Label()
322 self.label_info.set_selectable(True)
323 self.label_info.set_line_wrap(True)
324 if len(homepage) > 35:
325 self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
326 else:
327 self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
328
329 self.label_info.set_property("xalign", 0)
330
331 self.label_short = gtk.Label()
332 self.label_short.set_selectable(True)
333 self.label_short.set_line_wrap(True)
334 self.label_short.set_markup("<b>Homepage: </b>")
335 self.label_short.set_property("xalign", 0)
336
337 self.vbox.add(self.label_short)
338 self.vbox.add(self.label_info)
339
340 ################################# BUGTRACKER ROW + COL ###########################################
341
342 if bugtracker != 'unknown':
343 self.label_info = gtk.Label()
344 self.label_info.set_selectable(True)
345 self.label_info.set_line_wrap(True)
346 if len(bugtracker) > 35:
347 self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
348 else:
349 self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
350 self.label_info.set_property("xalign", 0)
351
352 self.label_short = gtk.Label()
353 self.label_short.set_selectable(True)
354 self.label_short.set_line_wrap(True)
355 self.label_short.set_markup("<b>Bugtracker: </b>")
356 self.label_short.set_property("xalign", 0)
357
358 self.vbox.add(self.label_short)
359 self.vbox.add(self.label_info)
360
361 ################################# LICENSE ROW + COL ############################################
362
363 self.label_info = gtk.Label()
364 self.label_info.set_selectable(True)
365 self.label_info.set_line_wrap(True)
366 self.label_info.set_markup(license)
367 self.label_info.set_property("xalign", 0)
368
369 self.label_short = gtk.Label()
370 self.label_short.set_selectable(True)
371 self.label_short.set_line_wrap(True)
372 self.label_short.set_markup("<span weight=\"bold\">License: </span>")
373 self.label_short.set_property("xalign", 0)
374
375 self.vbox.add(self.label_short)
376 self.vbox.add(self.label_info)
377
378 ################################### BINB ROW+COL #############################################
379
380 if binb != '':
381 self.label_short = gtk.Label()
382 self.label_short.set_selectable(True)
383 self.label_short.set_line_wrap(True)
384 self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
385 self.label_short.set_property("xalign", 0)
386 self.vbox.add(self.label_short)
387 self.label_info = gtk.Label()
388 self.label_info.set_selectable(True)
389 self.label_info.set_width_chars(36)
390 if len(binb) > 200:
391 scrolled_window = gtk.ScrolledWindow()
392 scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
393 scrolled_window.set_size_request(100,100)
394 self.label_info.set_markup(binb)
395 self.label_info.set_padding(6,6)
396 self.label_info.set_alignment(0,0)
397 self.label_info.set_line_wrap(True)
398 scrolled_window.add_with_viewport(self.label_info)
399 self.vbox.add(scrolled_window)
400 else:
401 self.label_info.set_markup(binb)
402 self.label_info.set_property("xalign", 0)
403 self.label_info.set_line_wrap(True)
404 self.vbox.add(self.label_info)
405
406 ################################ DESCRIPTION TAG ROW #################################################
407
408 self.label_short = gtk.Label()
409 self.label_short.set_line_wrap(True)
410 self.label_short.set_markup("<span weight=\"bold\">Description </span>")
411 self.label_short.set_property("xalign", 0)
412 self.vbox.add(self.label_short)
413
414 ################################ DESCRIPTION INFORMATION ROW ##########################################
415
416 hbox = gtk.HBox(True,spacing = 0)
417
418 self.label_short = gtk.Label()
419 self.label_short.set_selectable(True)
420 self.label_short.set_width_chars(36)
421 if len(description) > 200:
422 scrolled_window = gtk.ScrolledWindow()
423 scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
424 scrolled_window.set_size_request(100,100)
425 self.label_short.set_markup(description)
426 self.label_short.set_padding(6,6)
427 self.label_short.set_alignment(0,0)
428 self.label_short.set_line_wrap(True)
429 scrolled_window.add_with_viewport(self.label_short)
430 self.vbox.add(scrolled_window)
431 else:
432 self.label_short.set_markup(description)
433 self.label_short.set_property("xalign", 0)
434 self.label_short.set_line_wrap(True)
435 self.vbox.add(self.label_short)
436
437 self.vbox.show_all()
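
For the 'Package files' column, create_package_visual_elements() trims long paths down to 35 characters by dropping characters from the middle and keeps the full path in a tooltip. An equivalent middle-ellipsis helper, written without the character-by-character loop (a sketch; 35 is the same width used above):

def middle_ellipsis(path, width=35):
    """Shorten 'path' to at most 'width' characters, eliding the middle."""
    if len(path) <= width:
        return path
    head = (width - 3) // 2            # characters kept from the front
    tail = width - 3 - head            # characters kept from the end
    return path[:head] + "..." + path[-tail:]

# e.g. middle_ellipsis('/usr/lib/python2.7/site-packages/example/module.py')
# returns a 35-character string with '...' in the middle; the tooltip keeps the full path.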
diff --git a/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py
new file mode 100644
index 0000000000..69e7dffb6d
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py
@@ -0,0 +1,90 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
25
26"""
27The following are convenience classes for implementing GNOME HIG compliant
28BitBake GUIs
29In summary: spacing = 12px, border-width = 6px
30"""
31
32class ProxyDetailsDialog (CrumbsDialog):
33
34 def __init__(self, title, user, passwd, parent, flags, buttons=None):
35 super(ProxyDetailsDialog, self).__init__(title, parent, flags, buttons)
36 self.connect("response", self.response_cb)
37
38 self.auth = not (user == None or passwd == None or user == "")
39 self.user = user or ""
40 self.passwd = passwd or ""
41
42 # create visual elements on the dialog
43 self.create_visual_elements()
44
45 def create_visual_elements(self):
46 self.auth_checkbox = gtk.CheckButton("Use authentication")
47 self.auth_checkbox.set_tooltip_text("Check this box to set the username and the password")
48 self.auth_checkbox.set_active(self.auth)
49 self.auth_checkbox.connect("toggled", self.auth_checkbox_toggled_cb)
50 self.vbox.pack_start(self.auth_checkbox, expand=False, fill=False)
51
52 hbox = gtk.HBox(False, 6)
53 self.user_label = gtk.Label("Username:")
54 self.user_text = gtk.Entry()
55 self.user_text.set_text(self.user)
56 hbox.pack_start(self.user_label, expand=False, fill=False)
57 hbox.pack_end(self.user_text, expand=False, fill=False)
58 self.vbox.pack_start(hbox, expand=False, fill=False)
59
60 hbox = gtk.HBox(False, 6)
61 self.passwd_label = gtk.Label("Password:")
62 self.passwd_text = gtk.Entry()
63 self.passwd_text.set_text(self.passwd)
64 hbox.pack_start(self.passwd_label, expand=False, fill=False)
65 hbox.pack_end(self.passwd_text, expand=False, fill=False)
66 self.vbox.pack_start(hbox, expand=False, fill=False)
67
68 self.refresh_auth_components()
69 self.show_all()
70
71 def refresh_auth_components(self):
72 self.user_label.set_sensitive(self.auth)
73 self.user_text.set_editable(self.auth)
74 self.user_text.set_sensitive(self.auth)
75 self.passwd_label.set_sensitive(self.auth)
76 self.passwd_text.set_editable(self.auth)
77 self.passwd_text.set_sensitive(self.auth)
78
79 def auth_checkbox_toggled_cb(self, button):
80 self.auth = self.auth_checkbox.get_active()
81 self.refresh_auth_components()
82
83 def response_cb(self, dialog, response_id):
84 if response_id == gtk.RESPONSE_OK:
85 if self.auth:
86 self.user = self.user_text.get_text()
87 self.passwd = self.passwd_text.get_text()
88 else:
89 self.user = None
90 self.passwd = None
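
ProxyDetailsDialog stores its result in response_cb; the caller reads .user and .passwd after run() returns, and both come back as None when 'Use authentication' is unticked. A hedged usage sketch (the credentials and buttons are examples):

import gtk
from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog

dialog = ProxyDetailsDialog("Proxy details", "builder", "secret", None, gtk.DIALOG_MODAL)
dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
response = dialog.run()
if response == gtk.RESPONSE_OK:
    user, passwd = dialog.user, dialog.passwd   # both None if authentication is disabled
dialog.destroy()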
diff --git a/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py
new file mode 100644
index 0000000000..9017139850
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py
@@ -0,0 +1,51 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22
23class RetrieveImageDialog (gtk.FileChooserDialog):
24 """
25    This class is used to create a dialog that lets the user retrieve
26    a custom image previously saved from Hob.
27 """
28 def __init__(self, directory,title, parent, flags, buttons=None):
29 super(RetrieveImageDialog, self).__init__(title, None, gtk.FILE_CHOOSER_ACTION_OPEN,
30 (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN, gtk.RESPONSE_OK))
31 self.directory = directory
32
33 # create visual elements on the dialog
34 self.create_visual_elements()
35
36 def create_visual_elements(self):
37 self.set_show_hidden(True)
38 self.set_default_response(gtk.RESPONSE_OK)
39 self.set_current_folder(self.directory)
40
41 vbox = self.get_children()[0].get_children()[0].get_children()[0]
42 for child in vbox.get_children()[0].get_children()[0].get_children()[0].get_children():
43 vbox.get_children()[0].get_children()[0].get_children()[0].remove(child)
44
45 label1 = gtk.Label()
46 label1.set_text("File system" + self.directory)
47 label1.show()
48 vbox.get_children()[0].get_children()[0].get_children()[0].pack_start(label1, expand=False, fill=False, padding=0)
49 vbox.get_children()[0].get_children()[1].get_children()[0].hide()
50
51 self.get_children()[0].get_children()[1].get_children()[0].set_label("Select")
diff --git a/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py b/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py
new file mode 100644
index 0000000000..4195f70e1e
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py
@@ -0,0 +1,159 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2013 Intel Corporation
5#
6# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import glib
23from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
24from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
25from bb.ui.crumbs.hobwidget import HobButton
26
27class SaveImageDialog (CrumbsDialog):
28 """
29    This class is used to create a dialog that lets the user save
30 a custom image in a predefined directory.
31 """
32 def __init__(self, directory, name, description, title, parent, flags, buttons=None):
33 super(SaveImageDialog, self).__init__(title, parent, flags, buttons)
34 self.directory = directory
35 self.builder = parent
36 self.name_field = name
37 self.description_field = description
38
39 # create visual elements on the dialog
40 self.create_visual_elements()
41
42 def create_visual_elements(self):
43 self.set_default_response(gtk.RESPONSE_OK)
44 self.vbox.set_border_width(6)
45
46 sub_vbox = gtk.VBox(False, 12)
47 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
48 label = gtk.Label()
49 label.set_alignment(0, 0)
50 label.set_markup("<b>Name</b>")
51 sub_label = gtk.Label()
52 sub_label.set_alignment(0, 0)
53 content = "Image recipe names should be all lowercase and include only alphanumeric\n"
54 content += "characters. The only special character you can use is the ASCII hyphen (-)."
55 sub_label.set_markup(content)
56 self.name_entry = gtk.Entry()
57 self.name_entry.set_text(self.name_field)
58 self.name_entry.set_size_request(350,30)
59 self.name_entry.connect("changed", self.name_entry_changed)
60 sub_vbox.pack_start(label, expand=False, fill=False)
61 sub_vbox.pack_start(sub_label, expand=False, fill=False)
62 sub_vbox.pack_start(self.name_entry, expand=False, fill=False)
63
64 sub_vbox = gtk.VBox(False, 12)
65 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
66 label = gtk.Label()
67 label.set_alignment(0, 0)
68 label.set_markup("<b>Description</b> (optional)")
69 sub_label = gtk.Label()
70 sub_label.set_alignment(0, 0)
71 sub_label.set_markup("The description should be less than 150 characters long.")
72 self.description_entry = gtk.TextView()
73 description_buffer = self.description_entry.get_buffer()
74 description_buffer.set_text(self.description_field)
75 description_buffer.connect("insert-text", self.limit_description_length)
76 self.description_entry.set_wrap_mode(gtk.WRAP_WORD)
77 self.description_entry.set_size_request(350,50)
78 sub_vbox.pack_start(label, expand=False, fill=False)
79 sub_vbox.pack_start(sub_label, expand=False, fill=False)
80 sub_vbox.pack_start(self.description_entry, expand=False, fill=False)
81
82 sub_vbox = gtk.VBox(False, 12)
83 self.vbox.pack_start(sub_vbox, expand=False, fill=False)
84 label = gtk.Label()
85 label.set_alignment(0, 0)
86 label.set_markup("Your image recipe will be saved to:")
87 sub_label = gtk.Label()
88 sub_label.set_alignment(0, 0)
89 sub_label.set_markup(self.directory)
90 sub_vbox.pack_start(label, expand=False, fill=False)
91 sub_vbox.pack_start(sub_label, expand=False, fill=False)
92
93 table = gtk.Table(1, 4, True)
94
95 cancel_button = gtk.Button()
96 cancel_button.set_label("Cancel")
97 cancel_button.connect("clicked", self.cancel_button_cb)
98 cancel_button.set_size_request(110, 30)
99
100 self.save_button = gtk.Button()
101 self.save_button.set_label("Save")
102 self.save_button.connect("clicked", self.save_button_cb)
103 self.save_button.set_size_request(110, 30)
104 if self.name_entry.get_text() == '':
105 self.save_button.set_sensitive(False)
106
107 table.attach(cancel_button, 2, 3, 0, 1)
108 table.attach(self.save_button, 3, 4, 0, 1)
109 self.vbox.pack_end(table, expand=False, fill=False)
110
111 self.show_all()
112
113 def limit_description_length(self, textbuffer, iter, text, length):
114 buffer_bounds = textbuffer.get_bounds()
115 entire_text = textbuffer.get_text(*buffer_bounds)
116 entire_text += text
117 if len(entire_text)>150 or text=="\n":
118 textbuffer.emit_stop_by_name("insert-text")
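    # A note on the pattern above (assumed explanation, not part of the original
    # file): stopping the default "insert-text" handler is the usual PyGTK way to
    # veto an edit, so any character that would push the description past 150
    # characters, or a newline, is simply never inserted into the buffer.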
119
120 def name_entry_changed(self, entry):
121 text = entry.get_text()
122 if text == '':
123 self.save_button.set_sensitive(False)
124 else:
125 self.save_button.set_sensitive(True)
126
127 def cancel_button_cb(self, button):
128 self.destroy()
129
130 def save_button_cb(self, button):
131 text = self.name_entry.get_text()
132 new_text = text.replace("-","")
133 description_buffer = self.description_entry.get_buffer()
134 description = description_buffer.get_text(description_buffer.get_start_iter(),description_buffer.get_end_iter())
135 if new_text.islower() and new_text.isalnum():
136 self.builder.image_details_page.image_saved = True
137 self.builder.customized = False
138 self.builder.generate_new_image(self.directory+text, description)
139 self.builder.recipe_model.set_in_list(text, description)
140 self.builder.recipe_model.set_selected_image(text)
141 self.builder.image_details_page.show_page(self.builder.IMAGE_GENERATED)
142 self.builder.image_details_page.name_field_template = text
143 self.builder.image_details_page.description_field_template = description
144 self.destroy()
145 else:
146 self.show_invalid_input_error_dialog()
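        # Illustrative examples of the validation rule in save_button_cb() above
        # (assumed, not part of the original file): hyphens are stripped first,
        # then the remainder must be lowercase alphanumeric, e.g.
        #   "core-image-minimal".replace("-", "")   -> "coreimageminimal" (accepted)
        #   "My-Image".replace("-", "").islower()   -> False              (rejected)
        #   "image_2".replace("-", "").isalnum()    -> False              (rejected)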
147
148 def show_invalid_input_error_dialog(self):
149 lbl = "<b>Invalid characters in image recipe name</b>"
150 msg = "Image recipe names should be all lowercase and\n"
151 msg += "include only alphanumeric characters. The only\n"
152 msg += "special character you can use is the ASCII hyphen (-)."
153 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
154 button = dialog.add_button("Close", gtk.RESPONSE_OK)
155 HobButton.style_button(button)
156
157 res = dialog.run()
158 self.name_entry.grab_focus()
159 dialog.destroy()
diff --git a/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py b/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
new file mode 100644
index 0000000000..e0285c93ce
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
@@ -0,0 +1,122 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import os
25from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
26
27"""
28The following are convenience classes for implementing GNOME HIG compliant
29BitBake GUIs.
30In summary: spacing = 12px, border-width = 6px
31"""
32
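# A minimal usage sketch (assumed, based on SimpleSettingsDialog later in this
# commit, which mixes this helper into a CrumbsDialog subclass):
#
#   class SimpleSettingsDialog(CrumbsDialog, SettingsUIHelper):
#       def create_build_environment_page(self):
#           widget, entry = self.gen_entry_widget(self.configuration.dldir, self)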
33class SettingsUIHelper():
34
35 def gen_label_widget(self, content):
36 label = gtk.Label()
37 label.set_alignment(0, 0)
38 label.set_markup(content)
39 label.show()
40 return label
41
42 def gen_label_info_widget(self, content, tooltip):
43 table = gtk.Table(1, 10, False)
44 label = self.gen_label_widget(content)
45 info = HobInfoButton(tooltip, self)
46 table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
47 table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
48 return table
49
50 def gen_spinner_widget(self, content, lower, upper, tooltip=""):
51 hbox = gtk.HBox(False, 12)
52 adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
53 spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
54
55 spinner.set_value(content)
56 hbox.pack_start(spinner, expand=False, fill=False)
57
58 info = HobInfoButton(tooltip, self)
59 hbox.pack_start(info, expand=False, fill=False)
60
61 hbox.show_all()
62 return hbox, spinner
63
64 def gen_combo_widget(self, curr_item, all_item, tooltip=""):
65 hbox = gtk.HBox(False, 12)
66 combo = gtk.combo_box_new_text()
67 hbox.pack_start(combo, expand=False, fill=False)
68
69 index = 0
70 for item in all_item or []:
71 combo.append_text(item)
72 if item == curr_item:
73 combo.set_active(index)
74 index += 1
75
76 info = HobInfoButton(tooltip, self)
77 hbox.pack_start(info, expand=False, fill=False)
78
79 hbox.show_all()
80 return hbox, combo
81
82 def entry_widget_select_path_cb(self, action, parent, entry):
83 dialog = gtk.FileChooserDialog("", parent,
84 gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
85 text = entry.get_text()
86 dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
87 button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
88 HobAltButton.style_button(button)
89 button = dialog.add_button("Open", gtk.RESPONSE_YES)
90 HobButton.style_button(button)
91 response = dialog.run()
92 if response == gtk.RESPONSE_YES:
93 path = dialog.get_filename()
94 entry.set_text(path)
95
96 dialog.destroy()
97
98 def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
99 hbox = gtk.HBox(False, 12)
100 entry = gtk.Entry()
101 entry.set_text(content)
102 entry.set_size_request(350,30)
103
104 if need_button:
105 table = gtk.Table(1, 10, False)
106 hbox.pack_start(table, expand=True, fill=True)
107 table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
108 image = gtk.Image()
109 image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
110 open_button = gtk.Button()
111 open_button.set_image(image)
112 open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
113 table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
114 else:
115 hbox.pack_start(entry, expand=True, fill=True)
116
117 if tooltip != "":
118 info = HobInfoButton(tooltip, self)
119 hbox.pack_start(info, expand=False, fill=False)
120
121 hbox.show_all()
122 return hbox, entry
diff --git a/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
new file mode 100644
index 0000000000..b5eb3d8738
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
@@ -0,0 +1,891 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011-2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25import hashlib
26from bb.ui.crumbs.hobwidget import hic, HobInfoButton, HobButton, HobAltButton
27from bb.ui.crumbs.progressbar import HobProgressBar
28from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
29from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
30from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
31from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog
32
33"""
34The following are convenience classes for implementing GNOME HIG compliant
35BitBake GUIs.
36In summary: spacing = 12px, border-width = 6px
37"""
38
39class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
40
41 (BUILD_ENV_PAGE_ID,
42 SHARED_STATE_PAGE_ID,
43 PROXIES_PAGE_ID,
44 OTHERS_PAGE_ID) = range(4)
45
46 (TEST_NETWORK_NONE,
47 TEST_NETWORK_INITIAL,
48 TEST_NETWORK_RUNNING,
49 TEST_NETWORK_PASSED,
50 TEST_NETWORK_FAILED,
51 TEST_NETWORK_CANCELED) = range(6)
52
53 TARGETS = [
54 ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
55 ("text/plain", 0, 1),
56 ("TEXT", 0, 2),
57 ("STRING", 0, 3),
58 ]
59
60 def __init__(self, title, configuration, all_image_types,
61 all_package_formats, all_distros, all_sdk_machines,
62 max_threads, parent, flags, handler, buttons=None):
63 super(SimpleSettingsDialog, self).__init__(title, parent, flags, buttons)
64
65 # class members from other objects
66 # bitbake settings from Builder.Configuration
67 self.configuration = configuration
68 self.image_types = all_image_types
69 self.all_package_formats = all_package_formats
70 self.all_distros = all_distros
71 self.all_sdk_machines = all_sdk_machines
72 self.max_threads = max_threads
73
74 # class members for internal use
75 self.dldir_text = None
76 self.sstatedir_text = None
77 self.sstatemirrors_list = []
78 self.sstatemirrors_changed = 0
79 self.bb_spinner = None
80 self.pmake_spinner = None
81 self.rootfs_size_spinner = None
82 self.extra_size_spinner = None
83 self.gplv3_checkbox = None
84 self.toolchain_checkbox = None
85 self.setting_store = None
86 self.image_types_checkbuttons = {}
87
88 self.md5 = self.config_md5()
89 self.proxy_md5 = self.config_proxy_md5()
90 self.settings_changed = False
91 self.proxy_settings_changed = False
92 self.handler = handler
93 self.proxy_test_ran = False
94 self.selected_mirror_row = 0
95 self.new_mirror = False
96
97 # create visual elements on the dialog
98 self.create_visual_elements()
99 self.connect("response", self.response_cb)
100
101 def _get_sorted_value(self, var):
102 return " ".join(sorted(str(var).split())) + "\n"
103
104 def config_proxy_md5(self):
105 data = ("ENABLE_PROXY: " + self._get_sorted_value(self.configuration.enable_proxy))
106 if self.configuration.enable_proxy:
107 for protocol in self.configuration.proxies.keys():
108 data += (protocol + ": " + self._get_sorted_value(self.configuration.combine_proxy(protocol)))
109 return hashlib.md5(data).hexdigest()
110
111 def config_md5(self):
112 data = ""
113 for key in self.configuration.extra_setting.keys():
114 data += (key + ": " + self._get_sorted_value(self.configuration.extra_setting[key]))
115 return hashlib.md5(data).hexdigest()
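    # How change detection works here (assumed summary, not part of the original
    # file): _get_sorted_value() sorts the whitespace-split tokens of each value,
    # so the hashes are order-insensitive, e.g.
    #   " ".join(sorted("b a".split())) + "\n"  ->  "a b\n"
    # response_cb() later recomputes both digests and compares them with the ones
    # captured in __init__ to decide whether any settings actually changed.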
116
117 def gen_proxy_entry_widget(self, protocol, parent, need_button=True, line=0):
118 label = gtk.Label(protocol.upper() + " proxy")
119 self.proxy_table.attach(label, 0, 1, line, line+1, xpadding=24)
120
121 proxy_entry = gtk.Entry()
122 proxy_entry.set_size_request(300, -1)
123 self.proxy_table.attach(proxy_entry, 1, 2, line, line+1, ypadding=4)
124
125 self.proxy_table.attach(gtk.Label(":"), 2, 3, line, line+1, xpadding=12, ypadding=4)
126
127 port_entry = gtk.Entry()
128 port_entry.set_size_request(60, -1)
129 self.proxy_table.attach(port_entry, 3, 4, line, line+1, ypadding=4)
130
131 details_button = HobAltButton("Details")
132 details_button.connect("clicked", self.details_cb, parent, protocol)
133 self.proxy_table.attach(details_button, 4, 5, line, line+1, xpadding=4, yoptions=gtk.EXPAND)
134
135 return proxy_entry, port_entry, details_button
136
137 def refresh_proxy_components(self):
138 self.same_checkbox.set_sensitive(self.configuration.enable_proxy)
139
140 self.http_proxy.set_text(self.configuration.combine_host_only("http"))
141 self.http_proxy.set_editable(self.configuration.enable_proxy)
142 self.http_proxy.set_sensitive(self.configuration.enable_proxy)
143 self.http_proxy_port.set_text(self.configuration.combine_port_only("http"))
144 self.http_proxy_port.set_editable(self.configuration.enable_proxy)
145 self.http_proxy_port.set_sensitive(self.configuration.enable_proxy)
146 self.http_proxy_details.set_sensitive(self.configuration.enable_proxy)
147
148 self.https_proxy.set_text(self.configuration.combine_host_only("https"))
149 self.https_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
150 self.https_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
151 self.https_proxy_port.set_text(self.configuration.combine_port_only("https"))
152 self.https_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
153 self.https_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
154 self.https_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
155
156 self.ftp_proxy.set_text(self.configuration.combine_host_only("ftp"))
157 self.ftp_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
158 self.ftp_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
159 self.ftp_proxy_port.set_text(self.configuration.combine_port_only("ftp"))
160 self.ftp_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
161 self.ftp_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
162 self.ftp_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
163
164 self.socks_proxy.set_text(self.configuration.combine_host_only("socks"))
165 self.socks_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
166 self.socks_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
167 self.socks_proxy_port.set_text(self.configuration.combine_port_only("socks"))
168 self.socks_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
169 self.socks_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
170 self.socks_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
171
172 self.cvs_proxy.set_text(self.configuration.combine_host_only("cvs"))
173 self.cvs_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
174 self.cvs_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
175 self.cvs_proxy_port.set_text(self.configuration.combine_port_only("cvs"))
176 self.cvs_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
177 self.cvs_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
178 self.cvs_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
179
180 if self.configuration.same_proxy:
181 if self.http_proxy.get_text():
182 [w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
183 if self.http_proxy_port.get_text():
184 [w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]
185
186 def proxy_checkbox_toggled_cb(self, button):
187 self.configuration.enable_proxy = self.proxy_checkbox.get_active()
188 if not self.configuration.enable_proxy:
189 self.configuration.same_proxy = False
190 self.same_checkbox.set_active(self.configuration.same_proxy)
191 self.save_proxy_data()
192 self.refresh_proxy_components()
193
194 def same_checkbox_toggled_cb(self, button):
195 self.configuration.same_proxy = self.same_checkbox.get_active()
196 self.save_proxy_data()
197 self.refresh_proxy_components()
198
199 def save_proxy_data(self):
200 self.configuration.split_proxy("http", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
201 if self.configuration.same_proxy:
202 self.configuration.split_proxy("https", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
203 self.configuration.split_proxy("ftp", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
204 self.configuration.split_proxy("socks", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
205 self.configuration.split_proxy("cvs", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
206 else:
207 self.configuration.split_proxy("https", self.https_proxy.get_text() + ":" + self.https_proxy_port.get_text())
208 self.configuration.split_proxy("ftp", self.ftp_proxy.get_text() + ":" + self.ftp_proxy_port.get_text())
209 self.configuration.split_proxy("socks", self.socks_proxy.get_text() + ":" + self.socks_proxy_port.get_text())
210 self.configuration.split_proxy("cvs", self.cvs_proxy.get_text() + ":" + self.cvs_proxy_port.get_text())
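    # A note on the proxy round-trip above (assumed, not part of the original
    # file): each protocol is edited as separate host and port entries and handed
    # back to the configuration as a single "host:port" string, e.g. with
    # hypothetical values:
    #   self.configuration.split_proxy("http", "http://proxy.example.com" + ":" + "8080")
    # When "use the HTTP proxy for all protocols" is active, the HTTP values are
    # reused for https, ftp, socks and cvs, as the first branch shows.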
211
212 def response_cb(self, dialog, response_id):
213 if response_id == gtk.RESPONSE_YES:
214 if self.proxy_checkbox.get_active():
215 # Check that all proxy entries have a corresponding port
216 for proxy, port in zip(self.all_proxy_addresses, self.all_proxy_ports):
217 if proxy.get_text() and not port.get_text():
218 lbl = "<b>Enter all port numbers</b>"
219 msg = "Proxy servers require a port number. Please make sure you have entered a port number for each proxy server."
220 dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
221 button = dialog.add_button("Close", gtk.RESPONSE_OK)
222 HobButton.style_button(button)
223 response = dialog.run()
224 dialog.destroy()
225 self.emit_stop_by_name("response")
226 return
227
228 self.configuration.dldir = self.dldir_text.get_text()
229 self.configuration.sstatedir = self.sstatedir_text.get_text()
230 self.configuration.sstatemirror = ""
231 for mirror in self.sstatemirrors_list:
232 if mirror[1] != "" and mirror[2].startswith("file://"):
233 smirror = mirror[2] + " " + mirror[1] + " \\n "
234 self.configuration.sstatemirror += smirror
235 self.configuration.bbthread = self.bb_spinner.get_value_as_int()
236 self.configuration.pmake = self.pmake_spinner.get_value_as_int()
237 self.save_proxy_data()
238 self.configuration.extra_setting = {}
239 it = self.setting_store.get_iter_first()
240 while it:
241 key = self.setting_store.get_value(it, 0)
242 value = self.setting_store.get_value(it, 1)
243 self.configuration.extra_setting[key] = value
244 it = self.setting_store.iter_next(it)
245
246 md5 = self.config_md5()
247 self.settings_changed = (self.md5 != md5)
248 self.proxy_settings_changed = (self.proxy_md5 != self.config_proxy_md5())
249
250 def create_build_environment_page(self):
251 advanced_vbox = gtk.VBox(False, 6)
252 advanced_vbox.set_border_width(6)
253
254 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Parallel threads</span>'), expand=False, fill=False)
255 sub_vbox = gtk.VBox(False, 6)
256 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
257 label = self.gen_label_widget("BitBake parallel threads")
258 tooltip = "Sets the number of BitBake tasks that can run simultaneously. See the <a href=\""
259 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
260 tooltip += "poky-ref-manual.html#var-BB_NUMBER_THREADS\">Poky reference manual</a> for information"
261 bbthread_widget, self.bb_spinner = self.gen_spinner_widget(self.configuration.bbthread, 1, self.max_threads,"<b>BitBake parallel threads</b>" + "*" + tooltip)
262 sub_vbox.pack_start(label, expand=False, fill=False)
263 sub_vbox.pack_start(bbthread_widget, expand=False, fill=False)
264
265 sub_vbox = gtk.VBox(False, 6)
266 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
267 label = self.gen_label_widget("Make parallel threads")
268 tooltip = "Sets the maximum number of threads the host can use during the build. See the <a href=\""
269 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
270 tooltip += "poky-ref-manual.html#var-PARALLEL_MAKE\">Poky reference manual</a> for information"
271 pmake_widget, self.pmake_spinner = self.gen_spinner_widget(self.configuration.pmake, 1, self.max_threads,"<b>Make parallel threads</b>" + "*" + tooltip)
272 sub_vbox.pack_start(label, expand=False, fill=False)
273 sub_vbox.pack_start(pmake_widget, expand=False, fill=False)
274
275 advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Downloaded source code</span>'), expand=False, fill=False)
276 sub_vbox = gtk.VBox(False, 6)
277 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
278 label = self.gen_label_widget("Downloads directory")
279 tooltip = "Select a folder that caches the upstream project source code"
280 dldir_widget, self.dldir_text = self.gen_entry_widget(self.configuration.dldir, self,"<b>Downloaded source code</b>" + "*" + tooltip)
281 sub_vbox.pack_start(label, expand=False, fill=False)
282 sub_vbox.pack_start(dldir_widget, expand=False, fill=False)
283
284 return advanced_vbox
285
286 def create_shared_state_page(self):
287 advanced_vbox = gtk.VBox(False)
288 advanced_vbox.set_border_width(12)
289
290 sub_vbox = gtk.VBox(False)
291 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False, padding=24)
292 content = "<span>Shared state directory</span>"
293 tooltip = "Select a folder that caches your prebuilt results"
294 label = self.gen_label_info_widget(content,"<b>Shared state directory</b>" + "*" + tooltip)
295 sstatedir_widget, self.sstatedir_text = self.gen_entry_widget(self.configuration.sstatedir, self)
296 sub_vbox.pack_start(label, expand=False, fill=False)
297 sub_vbox.pack_start(sstatedir_widget, expand=False, fill=False, padding=6)
298
299 content = "<span weight=\"bold\">Shared state mirrors</span>"
300 tooltip = "URLs pointing to pre-built mirrors that will speed your build. "
301 tooltip += "Select the \'Standard\' configuration if the structure of your "
302 tooltip += "mirror replicates the structure of your local shared state directory. "
303 tooltip += "For more information on shared state mirrors, check the <a href=\""
304 tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
305 tooltip += "poky-ref-manual.html#shared-state\">Yocto Project Reference Manual</a>."
306 table = self.gen_label_info_widget(content,"<b>Shared state mirrors</b>" + "*" + tooltip)
307 advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)
308
309 sub_vbox = gtk.VBox(False)
310 advanced_vbox.pack_start(sub_vbox, gtk.TRUE, gtk.TRUE, 0)
311
312 if self.sstatemirrors_changed == 0:
313 self.sstatemirrors_changed = 1
314 sstatemirrors = self.configuration.sstatemirror
315 if sstatemirrors == "":
316 sm_list = ["Standard", "", "file://(.*)"]
317 self.sstatemirrors_list.append(sm_list)
318 else:
319 sstatemirrors = [x for x in sstatemirrors.split('\\n')]
320 for sstatemirror in sstatemirrors:
321 sstatemirror_fields = [x for x in sstatemirror.split(' ') if x.strip()]
322 if len(sstatemirror_fields) == 2:
323 if sstatemirror_fields[0] == "file://(.*)" or sstatemirror_fields[0] == "file://.*":
324 sm_list = ["Standard", sstatemirror_fields[1], sstatemirror_fields[0]]
325 else:
326 sm_list = ["Custom", sstatemirror_fields[1], sstatemirror_fields[0]]
327 self.sstatemirrors_list.append(sm_list)
328
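        # Format parsed by the block above (assumed summary, not part of the
        # original file): the stored mirror string is a sequence of "<regex> <url>"
        # pairs separated by literal "\\n" tokens (a backslash plus "n", not a
        # newline), matching what response_cb() writes back. A single standard
        # mirror might be stored as (hypothetical URL):
        #   "file://(.*) http://sstate.example.com/PATH \\n "
        # Each pair becomes a [configuration, url, regex] row for the tree view.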
329 sstatemirrors_widget, sstatemirrors_store = self.gen_shared_sstate_widget(self.sstatemirrors_list, self)
330 sub_vbox.pack_start(sstatemirrors_widget, expand=True, fill=True)
331
332 table = gtk.Table(1, 10, False)
333 table.set_col_spacings(6)
334 add_mirror_button = HobAltButton("Add mirror")
335 add_mirror_button.connect("clicked", self.add_mirror)
336 add_mirror_button.set_size_request(120,30)
337 table.attach(add_mirror_button, 1, 2, 0, 1, xoptions=gtk.SHRINK)
338
339 self.delete_button = HobAltButton("Delete mirror")
340 self.delete_button.connect("clicked", self.delete_cb)
341 self.delete_button.set_size_request(120, 30)
342 table.attach(self.delete_button, 3, 4, 0, 1, xoptions=gtk.SHRINK)
343
344 advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)
345
346 return advanced_vbox
347
348 def gen_shared_sstate_widget(self, sstatemirrors_list, window):
349 hbox = gtk.HBox(False)
350
351 sstatemirrors_store = gtk.ListStore(str, str, str)
352 for sstatemirror in sstatemirrors_list:
353 sstatemirrors_store.append(sstatemirror)
354
355 self.sstatemirrors_tv = gtk.TreeView()
356 self.sstatemirrors_tv.set_rules_hint(True)
357 self.sstatemirrors_tv.set_headers_visible(True)
358 tree_selection = self.sstatemirrors_tv.get_selection()
359 tree_selection.set_mode(gtk.SELECTION_SINGLE)
360
361 # Enable drag and drop of rows, including row moves
362 self.sstatemirrors_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
363 self.TARGETS,
364 gtk.gdk.ACTION_DEFAULT|
365 gtk.gdk.ACTION_MOVE)
366 self.sstatemirrors_tv.enable_model_drag_dest(self.TARGETS,
367 gtk.gdk.ACTION_DEFAULT)
368 self.sstatemirrors_tv.connect("drag_data_get", self.drag_data_get_cb)
369 self.sstatemirrors_tv.connect("drag_data_received", self.drag_data_received_cb)
370
371
372 self.scroll = gtk.ScrolledWindow()
373 self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
374 self.scroll.set_shadow_type(gtk.SHADOW_IN)
375 self.scroll.connect('size-allocate', self.scroll_changed)
376 self.scroll.add(self.sstatemirrors_tv)
377
378 #list store for cell renderer
379 m = gtk.ListStore(gobject.TYPE_STRING)
380 m.append(["Standard"])
381 m.append(["Custom"])
382
383 cell0 = gtk.CellRendererCombo()
384 cell0.set_property("model",m)
385 cell0.set_property("text-column", 0)
386 cell0.set_property("editable", True)
387 cell0.set_property("has-entry", False)
388 col0 = gtk.TreeViewColumn("Configuration")
389 col0.pack_start(cell0, False)
390 col0.add_attribute(cell0, "text", 0)
391 col0.set_cell_data_func(cell0, self.configuration_field)
392 self.sstatemirrors_tv.append_column(col0)
393
394 cell0.connect("edited", self.combo_changed, sstatemirrors_store)
395
396 self.cell1 = gtk.CellRendererText()
397 self.cell1.set_padding(5,2)
398 col1 = gtk.TreeViewColumn('Regex', self.cell1)
399 col1.set_cell_data_func(self.cell1, self.regex_field)
400 self.sstatemirrors_tv.append_column(col1)
401
402 self.cell1.connect("edited", self.regex_changed, sstatemirrors_store)
403
404 cell2 = gtk.CellRendererText()
405 cell2.set_padding(5,2)
406 cell2.set_property("editable", True)
407 col2 = gtk.TreeViewColumn('URL', cell2)
408 col2.set_cell_data_func(cell2, self.url_field)
409 self.sstatemirrors_tv.append_column(col2)
410
411 cell2.connect("edited", self.url_changed, sstatemirrors_store)
412
413 self.sstatemirrors_tv.set_model(sstatemirrors_store)
414 self.sstatemirrors_tv.set_cursor(self.selected_mirror_row)
415 hbox.pack_start(self.scroll, expand=True, fill=True)
416 hbox.show_all()
417
418 return hbox, sstatemirrors_store
419
420 def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
421 treeselection = treeview.get_selection()
422 model, iter = treeselection.get_selected()
423 data = model.get_string_from_iter(iter)
424 selection.set(selection.target, 8, data)
425
426 def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
427 model = treeview.get_model()
428 data = []
429 tree_iter = model.get_iter_from_string(selection.data)
430 data.append(model.get_value(tree_iter, 0))
431 data.append(model.get_value(tree_iter, 1))
432 data.append(model.get_value(tree_iter, 2))
433
434 drop_info = treeview.get_dest_row_at_pos(x, y)
435 if drop_info:
436 path, position = drop_info
437 iter = model.get_iter(path)
438 if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
439 model.insert_before(iter, data)
440 else:
441 model.insert_after(iter, data)
442 else:
443 model.append(data)
444 if context.action == gtk.gdk.ACTION_MOVE:
445 context.finish(True, True, etime)
446 return
447
448 def delete_cb(self, button):
449 selection = self.sstatemirrors_tv.get_selection()
450 tree_model, tree_iter = selection.get_selected()
451 index = int(tree_model.get_string_from_iter(tree_iter))
452 if index == 0:
453 self.selected_mirror_row = index
454 else:
455 self.selected_mirror_row = index - 1
456 self.sstatemirrors_list.pop(index)
457 self.refresh_shared_state_page()
458 if not self.sstatemirrors_list:
459 self.delete_button.set_sensitive(False)
460
461 def add_mirror(self, button):
462 self.new_mirror = True
463 tooltip = "Select the pre-built mirror that will speed your build"
464 index = len(self.sstatemirrors_list)
465 self.selected_mirror_row = index
466 sm_list = ["Standard", "", "file://(.*)"]
467 self.sstatemirrors_list.append(sm_list)
468 self.refresh_shared_state_page()
469
470 def scroll_changed(self, widget, event, data=None):
471 if self.new_mirror == True:
472 adj = widget.get_vadjustment()
473 adj.set_value(adj.upper - adj.page_size)
474 self.new_mirror = False
475
476 def combo_changed(self, widget, path, text, model):
477 model[path][0] = text
478 selection = self.sstatemirrors_tv.get_selection()
479 tree_model, tree_iter = selection.get_selected()
480 index = int(tree_model.get_string_from_iter(tree_iter))
481 self.sstatemirrors_list[index][0] = text
482
483 def regex_changed(self, cell, path, new_text, user_data):
484 user_data[path][2] = new_text
485 selection = self.sstatemirrors_tv.get_selection()
486 tree_model, tree_iter = selection.get_selected()
487 index = int(tree_model.get_string_from_iter(tree_iter))
488 self.sstatemirrors_list[index][2] = new_text
489 return
490
491 def url_changed(self, cell, path, new_text, user_data):
492 if new_text!="Enter the mirror URL" and new_text!="Match regex and replace it with this URL":
493 user_data[path][1] = new_text
494 selection = self.sstatemirrors_tv.get_selection()
495 tree_model, tree_iter = selection.get_selected()
496 index = int(tree_model.get_string_from_iter(tree_iter))
497 self.sstatemirrors_list[index][1] = new_text
498 return
499
500 def configuration_field(self, column, cell, model, iter):
501 cell.set_property('text', model.get_value(iter, 0))
502 if model.get_value(iter, 0) == "Standard":
503 self.cell1.set_property("sensitive", False)
504 self.cell1.set_property("editable", False)
505 else:
506 self.cell1.set_property("sensitive", True)
507 self.cell1.set_property("editable", True)
508 return
509
510 def regex_field(self, column, cell, model, iter):
511 cell.set_property('text', model.get_value(iter, 2))
512 return
513
514 def url_field(self, column, cell, model, iter):
515 text = model.get_value(iter, 1)
516 if text == "":
517 if model.get_value(iter, 0) == "Standard":
518 text = "Enter the mirror URL"
519 else:
520 text = "Match regex and replace it with this URL"
521 cell.set_property('text', text)
522 return
523
524 def refresh_shared_state_page(self):
525 page_num = self.nb.get_current_page()
526 self.nb.remove_page(page_num)
527 self.nb.insert_page(self.create_shared_state_page(), gtk.Label("Shared state"), page_num)
528 self.show_all()
529 self.nb.set_current_page(page_num)
530
531 def test_proxy_ended(self, passed):
532 self.proxy_test_running = False
533 self.set_test_proxy_state(self.TEST_NETWORK_PASSED if passed else self.TEST_NETWORK_FAILED)
534 self.set_sensitive(True)
535 self.refresh_proxy_components()
536
537 def timer_func(self):
538 self.test_proxy_progress.pulse()
539 return self.proxy_test_running
540
541 def test_network_button_cb(self, b):
542 self.set_test_proxy_state(self.TEST_NETWORK_RUNNING)
543 self.set_sensitive(False)
544 self.save_proxy_data()
545 if self.configuration.enable_proxy == True:
546 self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
547 self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
548 self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
549 self.handler.set_socks_proxy(self.configuration.combine_proxy("socks"))
550 self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
551 elif self.configuration.enable_proxy == False:
552 self.handler.set_http_proxy("")
553 self.handler.set_https_proxy("")
554 self.handler.set_ftp_proxy("")
555 self.handler.set_socks_proxy("")
556 self.handler.set_cvs_proxy("", "")
557 self.proxy_test_ran = True
558 self.proxy_test_running = True
559 gobject.timeout_add(100, self.timer_func)
560 self.handler.trigger_network_test()
561
562 def test_proxy_focus_event(self, w, direction):
563 if self.test_proxy_state in [self.TEST_NETWORK_PASSED, self.TEST_NETWORK_FAILED]:
564 self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
565 return False
566
567 def http_proxy_changed(self, e):
568 if not self.configuration.same_proxy:
569 return
570 if e == self.http_proxy:
571 [w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
572 else:
573 [w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]
574
575 def proxy_address_focus_out_event(self, w, direction):
576 text = w.get_text()
577 if not text:
578 return False
579 if text.find("//") == -1:
580 w.set_text("http://" + text)
581 return False
582
583 def set_test_proxy_state(self, state):
584 if self.test_proxy_state == state:
585 return
586 [self.proxy_table.remove(w) for w in self.test_gui_elements]
587 if state == self.TEST_NETWORK_INITIAL:
588 self.proxy_table.attach(self.test_network_button, 1, 2, 5, 6)
589 self.test_network_button.show()
590 elif state == self.TEST_NETWORK_RUNNING:
591 self.test_proxy_progress.set_rcstyle("running")
592 self.test_proxy_progress.set_text("Testing network configuration")
593 self.proxy_table.attach(self.test_proxy_progress, 0, 5, 5, 6, xpadding=4)
594 self.test_proxy_progress.show()
595 else: # passed or failed
596 self.dummy_progress.update(1.0)
597 if state == self.TEST_NETWORK_PASSED:
598 self.dummy_progress.set_text("Your network is properly configured")
599 self.dummy_progress.set_rcstyle("running")
600 else:
601 self.dummy_progress.set_text("Network test failed")
602 self.dummy_progress.set_rcstyle("fail")
603 self.proxy_table.attach(self.dummy_progress, 0, 4, 5, 6)
604 self.proxy_table.attach(self.retest_network_button, 4, 5, 5, 6, xpadding=4)
605 self.dummy_progress.show()
606 self.retest_network_button.show()
607 self.test_proxy_state = state
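    # State machine summary for set_test_proxy_state() above (assumed, not part
    # of the original file):
    #   TEST_NETWORK_INITIAL  -> show the "Test network configuration" button
    #   TEST_NETWORK_RUNNING  -> show the pulsing progress bar
    #   TEST_NETWORK_PASSED / TEST_NETWORK_FAILED -> show the result bar plus a
    #                            "Retest" button
    # Any previously attached test widgets are removed from proxy_table before
    # the widgets for the new state are attached.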
608
609 def create_network_page(self):
610 advanced_vbox = gtk.VBox(False, 6)
611 advanced_vbox.set_border_width(6)
612 self.same_proxy_addresses = []
613 self.same_proxy_ports = []
614 self.all_proxy_ports = []
615 self.all_proxy_addresses = []
616
617 sub_vbox = gtk.VBox(False, 6)
618 advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
619 label = self.gen_label_widget("<span weight=\"bold\">Set the proxies used when fetching source code</span>")
620 tooltip = "Set the proxies used when fetching source code. A blank field uses a direct internet connection."
621 info = HobInfoButton("<span weight=\"bold\">Set the proxies used when fetching source code</span>" + "*" + tooltip, self)
622 hbox = gtk.HBox(False, 12)
623 hbox.pack_start(label, expand=True, fill=True)
624 hbox.pack_start(info, expand=False, fill=False)
625 sub_vbox.pack_start(hbox, expand=False, fill=False)
626
627 proxy_test_focus = []
628 self.direct_checkbox = gtk.RadioButton(None, "Direct network connection")
629 proxy_test_focus.append(self.direct_checkbox)
630 self.direct_checkbox.set_tooltip_text("Check this box to use a direct internet connection with no proxy")
631 self.direct_checkbox.set_active(not self.configuration.enable_proxy)
632 sub_vbox.pack_start(self.direct_checkbox, expand=False, fill=False)
633
634 self.proxy_checkbox = gtk.RadioButton(self.direct_checkbox, "Manual proxy configuration")
635 proxy_test_focus.append(self.proxy_checkbox)
636 self.proxy_checkbox.set_tooltip_text("Check this box to manually set up a specific proxy")
637 self.proxy_checkbox.set_active(self.configuration.enable_proxy)
638 sub_vbox.pack_start(self.proxy_checkbox, expand=False, fill=False)
639
640 self.same_checkbox = gtk.CheckButton("Use the HTTP proxy for all protocols")
641 proxy_test_focus.append(self.same_checkbox)
642 self.same_checkbox.set_tooltip_text("Check this box to use the HTTP proxy for all five proxies")
643 self.same_checkbox.set_active(self.configuration.same_proxy)
644 hbox = gtk.HBox(False, 12)
645 hbox.pack_start(self.same_checkbox, expand=False, fill=False, padding=24)
646 sub_vbox.pack_start(hbox, expand=False, fill=False)
647
648 self.proxy_table = gtk.Table(6, 5, False)
649 self.http_proxy, self.http_proxy_port, self.http_proxy_details = self.gen_proxy_entry_widget(
650 "http", self, True, 0)
651 proxy_test_focus +=[self.http_proxy, self.http_proxy_port]
652 self.http_proxy.connect("changed", self.http_proxy_changed)
653 self.http_proxy_port.connect("changed", self.http_proxy_changed)
654
655 self.https_proxy, self.https_proxy_port, self.https_proxy_details = self.gen_proxy_entry_widget(
656 "https", self, True, 1)
657 proxy_test_focus += [self.https_proxy, self.https_proxy_port]
658 self.same_proxy_addresses.append(self.https_proxy)
659 self.same_proxy_ports.append(self.https_proxy_port)
660
661 self.ftp_proxy, self.ftp_proxy_port, self.ftp_proxy_details = self.gen_proxy_entry_widget(
662 "ftp", self, True, 2)
663 proxy_test_focus += [self.ftp_proxy, self.ftp_proxy_port]
664 self.same_proxy_addresses.append(self.ftp_proxy)
665 self.same_proxy_ports.append(self.ftp_proxy_port)
666
667 self.socks_proxy, self.socks_proxy_port, self.socks_proxy_details = self.gen_proxy_entry_widget(
668 "socks", self, True, 3)
669 proxy_test_focus += [self.socks_proxy, self.socks_proxy_port]
670 self.same_proxy_addresses.append(self.socks_proxy)
671 self.same_proxy_ports.append(self.socks_proxy_port)
672
673 self.cvs_proxy, self.cvs_proxy_port, self.cvs_proxy_details = self.gen_proxy_entry_widget(
674 "cvs", self, True, 4)
675 proxy_test_focus += [self.cvs_proxy, self.cvs_proxy_port]
676 self.same_proxy_addresses.append(self.cvs_proxy)
677 self.same_proxy_ports.append(self.cvs_proxy_port)
678 self.all_proxy_ports = self.same_proxy_ports + [self.http_proxy_port]
679 self.all_proxy_addresses = self.same_proxy_addresses + [self.http_proxy]
680 sub_vbox.pack_start(self.proxy_table, expand=False, fill=False)
681 self.proxy_table.show_all()
682
683 # Create the graphical elements for the network test feature, but don't display them yet
684 self.test_network_button = HobAltButton("Test network configuration")
685 self.test_network_button.connect("clicked", self.test_network_button_cb)
686 self.test_proxy_progress = HobProgressBar()
687 self.dummy_progress = HobProgressBar()
688 self.retest_network_button = HobAltButton("Retest")
689 self.retest_network_button.connect("clicked", self.test_network_button_cb)
690 self.test_gui_elements = [self.test_network_button, self.test_proxy_progress, self.dummy_progress, self.retest_network_button]
691 # Initialize the network tester
692 self.test_proxy_state = self.TEST_NETWORK_NONE
693 self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
694 self.proxy_test_passed_id = self.handler.connect("network-passed", lambda h:self.test_proxy_ended(True))
695 self.proxy_test_failed_id = self.handler.connect("network-failed", lambda h:self.test_proxy_ended(False))
696 [w.connect("focus-in-event", self.test_proxy_focus_event) for w in proxy_test_focus]
697 [w.connect("focus-out-event", self.proxy_address_focus_out_event) for w in self.all_proxy_addresses]
698
699 self.direct_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
700 self.proxy_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
701 self.same_checkbox.connect("toggled", self.same_checkbox_toggled_cb)
702
703 self.refresh_proxy_components()
704 return advanced_vbox
705
706 def switch_to_page(self, page_id):
707 self.nb.set_current_page(page_id)
708
709 def details_cb(self, button, parent, protocol):
710 self.save_proxy_data()
711 dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
712 user = self.configuration.proxies[protocol][1],
713 passwd = self.configuration.proxies[protocol][2],
714 parent = parent,
715 flags = gtk.DIALOG_MODAL
716 | gtk.DIALOG_DESTROY_WITH_PARENT
717 | gtk.DIALOG_NO_SEPARATOR)
718 dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
719 response = dialog.run()
720 if response == gtk.RESPONSE_OK:
721 self.configuration.proxies[protocol][1] = dialog.user
722 self.configuration.proxies[protocol][2] = dialog.passwd
723 self.refresh_proxy_components()
724 dialog.destroy()
725
726 def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
727 combo_item = self.rootfs_combo.get_active_text()
728 for child in check_hbox.get_children():
729 if isinstance(child, gtk.CheckButton):
730 check_hbox.remove(child)
731 for format in all_package_format:
732 if format != combo_item:
733 check_button = gtk.CheckButton(format)
734 check_hbox.pack_start(check_button, expand=False, fill=False)
735 check_hbox.show_all()
736
737 def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
738 pkgfmt_hbox = gtk.HBox(False, 24)
739
740 rootfs_vbox = gtk.VBox(False, 6)
741 pkgfmt_hbox.pack_start(rootfs_vbox, expand=False, fill=False)
742
743 label = self.gen_label_widget("Root file system package format")
744 rootfs_vbox.pack_start(label, expand=False, fill=False)
745
746 rootfs_format = ""
747 if curr_package_format:
748 rootfs_format = curr_package_format.split()[0]
749
750 rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
751 rootfs_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)
752
753 extra_vbox = gtk.VBox(False, 6)
754 pkgfmt_hbox.pack_start(extra_vbox, expand=False, fill=False)
755
756 label = self.gen_label_widget("Additional package formats")
757 extra_vbox.pack_start(label, expand=False, fill=False)
758
759 check_hbox = gtk.HBox(False, 12)
760 extra_vbox.pack_start(check_hbox, expand=False, fill=False)
761 for format in all_package_format:
762 if format != rootfs_format:
763 check_button = gtk.CheckButton(format)
764 is_active = (format in curr_package_format.split())
765 check_button.set_active(is_active)
766 check_hbox.pack_start(check_button, expand=False, fill=False)
767
768 info = HobInfoButton(tooltip_extra, self)
769 check_hbox.pack_end(info, expand=False, fill=False)
770
771 rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)
772
773 pkgfmt_hbox.show_all()
774
775 return pkgfmt_hbox, rootfs_combo, check_hbox
776
777 def editable_settings_cell_edited(self, cell, path_string, new_text, model):
778 it = model.get_iter_from_string(path_string)
779 column = cell.get_data("column")
780 model.set(it, column, new_text)
781
782 def editable_settings_add_item_clicked(self, button, model):
783 new_item = ["##KEY##", "##VALUE##"]
784
785 iter = model.append()
786 model.set (iter,
787 0, new_item[0],
788 1, new_item[1],
789 )
790
791 def editable_settings_remove_item_clicked(self, button, treeview):
792 selection = treeview.get_selection()
793 model, iter = selection.get_selected()
794
795 if iter:
796 path = model.get_path(iter)[0]
797 model.remove(iter)
798
799 def gen_editable_settings(self, setting, tooltip=""):
800 setting_hbox = gtk.HBox(False, 12)
801
802 vbox = gtk.VBox(False, 12)
803 setting_hbox.pack_start(vbox, expand=True, fill=True)
804
805 setting_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
806 for key in setting.keys():
807 setting_store.set(setting_store.append(), 0, key, 1, setting[key])
808
809 setting_tree = gtk.TreeView(setting_store)
810 setting_tree.set_headers_visible(True)
811 setting_tree.set_size_request(300, 100)
812
813 col = gtk.TreeViewColumn('Key')
814 col.set_min_width(100)
815 col.set_max_width(150)
816 col.set_resizable(True)
817 col1 = gtk.TreeViewColumn('Value')
818 col1.set_min_width(100)
819 col1.set_max_width(150)
820 col1.set_resizable(True)
821 setting_tree.append_column(col)
822 setting_tree.append_column(col1)
823 cell = gtk.CellRendererText()
824 cell.set_property('width-chars', 10)
825 cell.set_property('editable', True)
826 cell.set_data("column", 0)
827 cell.connect("edited", self.editable_settings_cell_edited, setting_store)
828 cell1 = gtk.CellRendererText()
829 cell1.set_property('width-chars', 10)
830 cell1.set_property('editable', True)
831 cell1.set_data("column", 1)
832 cell1.connect("edited", self.editable_settings_cell_edited, setting_store)
833 col.pack_start(cell, True)
834 col1.pack_end(cell1, True)
835 col.set_attributes(cell, text=0)
836 col1.set_attributes(cell1, text=1)
837
838 scroll = gtk.ScrolledWindow()
839 scroll.set_shadow_type(gtk.SHADOW_IN)
840 scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
841 scroll.add(setting_tree)
842 vbox.pack_start(scroll, expand=True, fill=True)
843
844 # Add and Remove buttons for the settings list
845 hbox = gtk.HBox(True, 6)
846 vbox.pack_start(hbox, False, False)
847
848 button = gtk.Button(stock=gtk.STOCK_ADD)
849 button.connect("clicked", self.editable_settings_add_item_clicked, setting_store)
850 hbox.pack_start(button)
851
852 button = gtk.Button(stock=gtk.STOCK_REMOVE)
853 button.connect("clicked", self.editable_settings_remove_item_clicked, setting_tree)
854 hbox.pack_start(button)
855
856 info = HobInfoButton(tooltip, self)
857 setting_hbox.pack_start(info, expand=False, fill=False)
858
859 return setting_hbox, setting_store
860
861 def create_others_page(self):
862 advanced_vbox = gtk.VBox(False, 6)
863 advanced_vbox.set_border_width(6)
864
865 sub_vbox = gtk.VBox(False, 6)
866 advanced_vbox.pack_start(sub_vbox, expand=True, fill=True)
867 label = self.gen_label_widget("<span weight=\"bold\">Add your own variables:</span>")
868 tooltip = "These are key/value pairs for your extra settings. Click \'Add\' and then directly edit the key and the value"
869 setting_widget, self.setting_store = self.gen_editable_settings(self.configuration.extra_setting,"<b>Add your own variables</b>" + "*" + tooltip)
870 sub_vbox.pack_start(label, expand=False, fill=False)
871 sub_vbox.pack_start(setting_widget, expand=True, fill=True)
872
873 return advanced_vbox
874
875 def create_visual_elements(self):
876 self.nb = gtk.Notebook()
877 self.nb.set_show_tabs(True)
878 self.nb.append_page(self.create_build_environment_page(), gtk.Label("Build environment"))
879 self.nb.append_page(self.create_shared_state_page(), gtk.Label("Shared state"))
880 self.nb.append_page(self.create_network_page(), gtk.Label("Network"))
881 self.nb.append_page(self.create_others_page(), gtk.Label("Others"))
882 self.nb.set_current_page(0)
883 self.vbox.pack_start(self.nb, expand=True, fill=True)
884 self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)
885
886 self.show_all()
887
888 def destroy(self):
889 self.handler.disconnect(self.proxy_test_passed_id)
890 self.handler.disconnect(self.proxy_test_failed_id)
891 super(SimpleSettingsDialog, self).destroy()
diff --git a/bitbake/lib/bb/ui/crumbs/hobcolor.py b/bitbake/lib/bb/ui/crumbs/hobcolor.py
new file mode 100644
index 0000000000..3316542a20
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobcolor.py
@@ -0,0 +1,38 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# Authored by Shane Wang <shane.wang@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21class HobColors:
22 WHITE = "#ffffff"
23 PALE_GREEN = "#aaffaa"
24 ORANGE = "#eb8e68"
25 PALE_RED = "#ffaaaa"
26 GRAY = "#aaaaaa"
27 LIGHT_GRAY = "#dddddd"
28 SLIGHT_DARK = "#5f5f5f"
29 DARK = "#3c3b37"
30 BLACK = "#000000"
31 PALE_BLUE = "#53b8ff"
32 DEEP_RED = "#aa3e3e"
33 KHAKI = "#fff68f"
34
35 OK = WHITE
36 RUNNING = PALE_GREEN
37 WARNING = ORANGE
38 ERROR = PALE_RED
diff --git a/bitbake/lib/bb/ui/crumbs/hobeventhandler.py b/bitbake/lib/bb/ui/crumbs/hobeventhandler.py
new file mode 100644
index 0000000000..43edb70b08
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobeventhandler.py
@@ -0,0 +1,639 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gobject
23import logging
24import ast
25from bb.ui.crumbs.runningbuild import RunningBuild
26
27class HobHandler(gobject.GObject):
28
29 """
30 This object does BitBake event handling for the Hob GUI.
31 """
32 __gsignals__ = {
33 "package-formats-updated" : (gobject.SIGNAL_RUN_LAST,
34 gobject.TYPE_NONE,
35 (gobject.TYPE_PYOBJECT,)),
36 "config-updated" : (gobject.SIGNAL_RUN_LAST,
37 gobject.TYPE_NONE,
38 (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
39 "command-succeeded" : (gobject.SIGNAL_RUN_LAST,
40 gobject.TYPE_NONE,
41 (gobject.TYPE_INT,)),
42 "command-failed" : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 (gobject.TYPE_STRING,)),
45 "parsing-warning" : (gobject.SIGNAL_RUN_LAST,
46 gobject.TYPE_NONE,
47 (gobject.TYPE_STRING,)),
48 "sanity-failed" : (gobject.SIGNAL_RUN_LAST,
49 gobject.TYPE_NONE,
50 (gobject.TYPE_STRING, gobject.TYPE_INT)),
51 "generating-data" : (gobject.SIGNAL_RUN_LAST,
52 gobject.TYPE_NONE,
53 ()),
54 "data-generated" : (gobject.SIGNAL_RUN_LAST,
55 gobject.TYPE_NONE,
56 ()),
57 "parsing-started" : (gobject.SIGNAL_RUN_LAST,
58 gobject.TYPE_NONE,
59 (gobject.TYPE_PYOBJECT,)),
60 "parsing" : (gobject.SIGNAL_RUN_LAST,
61 gobject.TYPE_NONE,
62 (gobject.TYPE_PYOBJECT,)),
63 "parsing-completed" : (gobject.SIGNAL_RUN_LAST,
64 gobject.TYPE_NONE,
65 (gobject.TYPE_PYOBJECT,)),
66 "recipe-populated" : (gobject.SIGNAL_RUN_LAST,
67 gobject.TYPE_NONE,
68 ()),
69 "package-populated" : (gobject.SIGNAL_RUN_LAST,
70 gobject.TYPE_NONE,
71 ()),
72 "network-passed" : (gobject.SIGNAL_RUN_LAST,
73 gobject.TYPE_NONE,
74 ()),
75 "network-failed" : (gobject.SIGNAL_RUN_LAST,
76 gobject.TYPE_NONE,
77 ()),
78 }
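    # How these signals are consumed (assumed sketch, not part of the original
    # file): GUI components connect callbacks and the handler emits the signal
    # when the matching BitBake event arrives, e.g. the settings dialog earlier
    # in this commit does:
    #   handler.connect("network-passed", lambda h: on_network_test_passed())
    # and the handler side would emit it with:
    #   self.emit("network-passed")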
79
80 (GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO, SANITY_CHECK, NETWORK_TEST) = range(7)
81 (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_SANITY_CHECK,
82 SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE, SUB_NETWORK_TEST) = range(12)
83
84 def __init__(self, server, recipe_model, package_model):
85 super(HobHandler, self).__init__()
86
87 self.build = RunningBuild(sequential=True)
88
89 self.recipe_model = recipe_model
90 self.package_model = package_model
91
92 self.commands_async = []
93 self.generating = False
94 self.current_phase = None
95 self.building = False
96 self.recipe_queue = []
97 self.package_queue = []
98
99 self.server = server
100 self.error_msg = ""
101 self.initcmd = None
102 self.parsing = False
103
104 def set_busy(self):
105 if not self.generating:
106 self.emit("generating-data")
107 self.generating = True
108
109 def clear_busy(self):
110 if self.generating:
111 self.emit("data-generated")
112 self.generating = False
113
114 def runCommand(self, commandline):
115 try:
116 result, error = self.server.runCommand(commandline)
117 if error:
118 raise Exception("Error running command '%s': %s" % (commandline, error))
119 return result
120 except Exception as e:
121 self.commands_async = []
122 self.clear_busy()
123 self.emit("command-failed", "Hob Exception - %s" % (str(e)))
124 return None
125
126 def run_next_command(self, initcmd=None):
127 if initcmd != None:
128 self.initcmd = initcmd
129
130 if self.commands_async:
131 self.set_busy()
132 next_command = self.commands_async.pop(0)
133 else:
134 self.clear_busy()
135 if self.initcmd != None:
136 self.emit("command-succeeded", self.initcmd)
137 return
138
139 if next_command == self.SUB_PATH_LAYERS:
140 self.runCommand(["findConfigFilePath", "bblayers.conf"])
141 elif next_command == self.SUB_FILES_DISTRO:
142 self.runCommand(["findConfigFiles", "DISTRO"])
143 elif next_command == self.SUB_FILES_MACH:
144 self.runCommand(["findConfigFiles", "MACHINE"])
145 elif next_command == self.SUB_FILES_SDKMACH:
146 self.runCommand(["findConfigFiles", "MACHINE-SDK"])
147 elif next_command == self.SUB_MATCH_CLASS:
148 self.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
149 elif next_command == self.SUB_PARSE_CONFIG:
150 self.runCommand(["resetCooker"])
151 elif next_command == self.SUB_GNERATE_TGTS:
152 self.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
153 elif next_command == self.SUB_GENERATE_PKGINFO:
154 self.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
155 elif next_command == self.SUB_SANITY_CHECK:
156 self.runCommand(["triggerEvent", "bb.event.SanityCheck()"])
157 elif next_command == self.SUB_NETWORK_TEST:
158 self.runCommand(["triggerEvent", "bb.event.NetworkTest()"])
159 elif next_command == self.SUB_BUILD_RECIPES:
160 self.clear_busy()
161 self.building = True
162 self.runCommand(["buildTargets", self.recipe_queue, self.default_task])
163 self.recipe_queue = []
164 elif next_command == self.SUB_BUILD_IMAGE:
165 self.clear_busy()
166 self.building = True
167 target = self.image
168
169 if self.base_image:
170 # Request the build of a custom image
171 self.generate_hob_base_image(target)
172 self.set_var_in_file("LINGUAS_INSTALL", "", "local.conf")
173 hobImage = self.runCommand(["matchFile", target + ".bb"])
174 if self.base_image != self.recipe_model.__custom_image__:
175 baseImage = self.runCommand(["matchFile", self.base_image + ".bb"])
176 version = self.runCommand(["generateNewImage", hobImage, baseImage, self.package_queue, True, ""])
177 target += version
178 self.recipe_model.set_custom_image_version(version)
179
180 targets = [target]
181 if self.toolchain_packages:
182 self.set_var_in_file("TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages), "local.conf")
183 targets.append(target + ":do_populate_sdk")
184
185 self.runCommand(["buildTargets", targets, self.default_task])
186
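# Illustrative sketch of the queued-subcommand pattern used above: each public
# request (generate_configuration(), generate_recipes(), ...) appends SUB_*
# step constants to self.commands_async and hands a top-level command id to
# run_next_command(); each bb.command.CommandCompleted event then calls
# run_next_command() again until the queue drains, at which point the initial
# command id is reported via "command-succeeded".  All names below are
# illustrative, not taken from the source:
class QueueDriver(object):
    def __init__(self):
        self.commands_async = []
        self.initcmd = None

    def request(self, steps, initcmd):
        self.commands_async.extend(steps)   # queue the SUB_*-style steps
        self.run_next_command(initcmd)

    def run_next_command(self, initcmd=None):
        if initcmd is not None:
            self.initcmd = initcmd
        if not self.commands_async:
            print("command-succeeded: %s" % self.initcmd)
            return
        step = self.commands_async.pop(0)
        print("running step %s" % step)
        # in HobHandler the next call happens when the corresponding
        # bb.command.CommandCompleted event arrives, not recursively
        self.run_next_command()

driver = QueueDriver()
driver.request(["parse-config", "generate-targets"], initcmd="GENERATE_RECIPES")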
187 def display_error(self):
188 self.clear_busy()
189 self.emit("command-failed", self.error_msg)
190 self.error_msg = ""
191 if self.building:
192 self.building = False
193
194 def handle_event(self, event):
195 if not event:
196 return
197 if self.building:
198 self.current_phase = "building"
199 self.build.handle_event(event)
200
201 if isinstance(event, bb.event.PackageInfo):
202 self.package_model.populate(event._pkginfolist)
203 self.emit("package-populated")
204 self.run_next_command()
205
206 elif isinstance(event, bb.event.SanityCheckPassed):
207 reparse = self.runCommand(["getVariable", "BB_INVALIDCONF"]) or None
208 if reparse is True:
209 self.set_var_in_file("BB_INVALIDCONF", False, "local.conf")
210 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
211                self.commands_async.insert(0, self.SUB_PARSE_CONFIG)
212 self.run_next_command()
213
214 elif isinstance(event, bb.event.SanityCheckFailed):
215 self.emit("sanity-failed", event._msg, event._network_error)
216
217 elif isinstance(event, logging.LogRecord):
218 if not self.building:
219 if event.levelno >= logging.ERROR:
220 formatter = bb.msg.BBLogFormatter()
221 msg = formatter.format(event)
222 self.error_msg += msg + '\n'
223 elif event.levelno >= logging.WARNING and self.parsing == True:
224 formatter = bb.msg.BBLogFormatter()
225 msg = formatter.format(event)
226 warn_msg = msg + '\n'
227 self.emit("parsing-warning", warn_msg)
228
229 elif isinstance(event, bb.event.TargetsTreeGenerated):
230 self.current_phase = "data generation"
231 if event._model:
232 self.recipe_model.populate(event._model)
233 self.emit("recipe-populated")
234 elif isinstance(event, bb.event.ConfigFilesFound):
235 self.current_phase = "configuration lookup"
236 var = event._variable
237 values = event._values
238 values.sort()
239 self.emit("config-updated", var, values)
240 elif isinstance(event, bb.event.ConfigFilePathFound):
241 self.current_phase = "configuration lookup"
242 elif isinstance(event, bb.event.FilesMatchingFound):
243 self.current_phase = "configuration lookup"
244 # FIXME: hard coding, should at least be a variable shared between
245 # here and the caller
246 if event._pattern == "rootfs_":
247 formats = []
248 for match in event._matches:
249 classname, sep, cls = match.rpartition(".")
250 fs, sep, format = classname.rpartition("_")
251 formats.append(format)
252 formats.sort()
253 self.emit("package-formats-updated", formats)
254 elif isinstance(event, bb.command.CommandCompleted):
255 self.current_phase = None
256 self.run_next_command()
257 elif isinstance(event, bb.command.CommandFailed):
258 if event.error not in ("Forced shutdown", "Stopped build"):
259 self.error_msg += event.error
260 self.commands_async = []
261 self.display_error()
262 elif isinstance(event, (bb.event.ParseStarted,
263 bb.event.CacheLoadStarted,
264 bb.event.TreeDataPreparationStarted,
265 )):
266 message = {}
267 message["eventname"] = bb.event.getName(event)
268 message["current"] = 0
269 message["total"] = None
270 message["title"] = "Parsing recipes"
271 self.emit("parsing-started", message)
272 if isinstance(event, bb.event.ParseStarted):
273 self.parsing = True
274 elif isinstance(event, (bb.event.ParseProgress,
275 bb.event.CacheLoadProgress,
276 bb.event.TreeDataPreparationProgress)):
277 message = {}
278 message["eventname"] = bb.event.getName(event)
279 message["current"] = event.current
280 message["total"] = event.total
281 message["title"] = "Parsing recipes"
282 self.emit("parsing", message)
283 elif isinstance(event, (bb.event.ParseCompleted,
284 bb.event.CacheLoadCompleted,
285 bb.event.TreeDataPreparationCompleted)):
286 message = {}
287 message["eventname"] = bb.event.getName(event)
288 message["current"] = event.total
289 message["total"] = event.total
290 message["title"] = "Parsing recipes"
291 self.emit("parsing-completed", message)
292 if isinstance(event, bb.event.ParseCompleted):
293 self.parsing = False
294 elif isinstance(event, bb.event.NetworkTestFailed):
295 self.emit("network-failed")
296 self.run_next_command()
297 elif isinstance(event, bb.event.NetworkTestPassed):
298 self.emit("network-passed")
299 self.run_next_command()
300
301 if self.error_msg and not self.commands_async:
302 self.display_error()
303
304 return
305
306 def init_cooker(self):
307 self.runCommand(["createConfigFile", ".hob.conf"])
308
309 def set_extra_inherit(self, bbclass):
310 self.append_var_in_file("INHERIT", bbclass, ".hob.conf")
311
312 def set_bblayers(self, bblayers):
313 self.set_var_in_file("BBLAYERS", " ".join(bblayers), "bblayers.conf")
314
315 def set_machine(self, machine):
316 if machine:
317 self.early_assign_var_in_file("MACHINE", machine, "local.conf")
318
319 def set_sdk_machine(self, sdk_machine):
320 self.set_var_in_file("SDKMACHINE", sdk_machine, "local.conf")
321
322 def set_image_fstypes(self, image_fstypes):
323 self.set_var_in_file("IMAGE_FSTYPES", image_fstypes, "local.conf")
324
325 def set_distro(self, distro):
326 self.set_var_in_file("DISTRO", distro, "local.conf")
327
328 def set_package_format(self, format):
329 package_classes = ""
330 for pkgfmt in format.split():
331 package_classes += ("package_%s" % pkgfmt + " ")
332 self.set_var_in_file("PACKAGE_CLASSES", package_classes, "local.conf")
333
334 def set_bbthreads(self, threads):
335 self.set_var_in_file("BB_NUMBER_THREADS", threads, "local.conf")
336
337 def set_pmake(self, threads):
338 pmake = "-j %s" % threads
339 self.set_var_in_file("PARALLEL_MAKE", pmake, "local.conf")
340
341 def set_dl_dir(self, directory):
342 self.set_var_in_file("DL_DIR", directory, "local.conf")
343
344 def set_sstate_dir(self, directory):
345 self.set_var_in_file("SSTATE_DIR", directory, "local.conf")
346
347 def set_sstate_mirrors(self, url):
348 self.set_var_in_file("SSTATE_MIRRORS", url, "local.conf")
349
350 def set_extra_size(self, image_extra_size):
351 self.set_var_in_file("IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size), "local.conf")
352
353 def set_rootfs_size(self, image_rootfs_size):
354 self.set_var_in_file("IMAGE_ROOTFS_SIZE", str(image_rootfs_size), "local.conf")
355
356 def set_incompatible_license(self, incompat_license):
357 self.set_var_in_file("INCOMPATIBLE_LICENSE", incompat_license, "local.conf")
358
359 def set_extra_setting(self, extra_setting):
360 self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")
361
362 def set_extra_config(self, extra_setting):
363 old_extra_setting = self.runCommand(["getVariable", "EXTRA_SETTING"]) or {}
364 old_extra_setting = str(old_extra_setting)
365
366 old_extra_setting = ast.literal_eval(old_extra_setting)
367 if not type(old_extra_setting) == dict:
368 old_extra_setting = {}
369
370 # settings not changed
371 if old_extra_setting == extra_setting:
372 return
373
374        # remove the old EXTRA_SETTING variable
375 self.remove_var_from_file("EXTRA_SETTING")
376
377 # remove old settings from conf
378 for key in old_extra_setting.keys():
379 if key not in extra_setting:
380 self.remove_var_from_file(key)
381
382 # add new settings
383 for key, value in extra_setting.iteritems():
384 self.set_var_in_file(key, value, "local.conf")
385
386 if extra_setting:
387 self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")
388
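# set_extra_config() above persists the whole settings dict as the literal
# string form of a Python dict in EXTRA_SETTING and parses it back with
# ast.literal_eval(); a plain-Python sketch of that round trip (the keys and
# values here are illustrative only):
import ast

extra_setting = {"MY_EXTRA_VAR": "1", "ANOTHER_VAR": "abc"}
stored = str(extra_setting)             # what ends up as the EXTRA_SETTING value
restored = ast.literal_eval(stored)     # what the next call reads back
assert restored == extra_setting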
389 def set_http_proxy(self, http_proxy):
390 self.set_var_in_file("http_proxy", http_proxy, "local.conf")
391
392 def set_https_proxy(self, https_proxy):
393 self.set_var_in_file("https_proxy", https_proxy, "local.conf")
394
395 def set_ftp_proxy(self, ftp_proxy):
396 self.set_var_in_file("ftp_proxy", ftp_proxy, "local.conf")
397
398 def set_socks_proxy(self, socks_proxy):
399 self.set_var_in_file("all_proxy", socks_proxy, "local.conf")
400
401 def set_cvs_proxy(self, host, port):
402 self.set_var_in_file("CVS_PROXY_HOST", host, "local.conf")
403 self.set_var_in_file("CVS_PROXY_PORT", port, "local.conf")
404
405 def request_package_info(self):
406 self.commands_async.append(self.SUB_GENERATE_PKGINFO)
407 self.run_next_command(self.POPULATE_PACKAGEINFO)
408
409 def trigger_sanity_check(self):
410 self.commands_async.append(self.SUB_SANITY_CHECK)
411 self.run_next_command(self.SANITY_CHECK)
412
413 def trigger_network_test(self):
414 self.commands_async.append(self.SUB_NETWORK_TEST)
415 self.run_next_command(self.NETWORK_TEST)
416
417 def generate_configuration(self):
418 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
419 self.commands_async.append(self.SUB_PARSE_CONFIG)
420 self.commands_async.append(self.SUB_PATH_LAYERS)
421 self.commands_async.append(self.SUB_FILES_DISTRO)
422 self.commands_async.append(self.SUB_FILES_MACH)
423 self.commands_async.append(self.SUB_FILES_SDKMACH)
424 self.commands_async.append(self.SUB_MATCH_CLASS)
425 self.run_next_command(self.GENERATE_CONFIGURATION)
426
427 def generate_recipes(self):
428 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
429 self.commands_async.append(self.SUB_PARSE_CONFIG)
430 self.commands_async.append(self.SUB_GNERATE_TGTS)
431 self.run_next_command(self.GENERATE_RECIPES)
432
433 def generate_packages(self, tgts, default_task="build"):
434 targets = []
435 targets.extend(tgts)
436 self.recipe_queue = targets
437 self.default_task = default_task
438 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
439 self.commands_async.append(self.SUB_PARSE_CONFIG)
440 self.commands_async.append(self.SUB_BUILD_RECIPES)
441 self.run_next_command(self.GENERATE_PACKAGES)
442
443 def generate_image(self, image, base_image, image_packages=[], toolchain_packages=[], default_task="build"):
444 self.image = image
445 self.base_image = base_image
446 self.package_queue = image_packages
447 self.toolchain_packages = toolchain_packages
448 self.default_task = default_task
449 self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
450 self.commands_async.append(self.SUB_PARSE_CONFIG)
451 self.commands_async.append(self.SUB_BUILD_IMAGE)
452 self.run_next_command(self.GENERATE_IMAGE)
453
454 def generate_new_image(self, image, base_image, package_queue, description):
455 if base_image:
456 base_image = self.runCommand(["matchFile", self.base_image + ".bb"])
457 self.runCommand(["generateNewImage", image, base_image, package_queue, False, description])
458
459 def generate_hob_base_image(self, hob_image):
460 image_dir = self.get_topdir() + "/recipes/images/"
461 recipe_name = hob_image + ".bb"
462 self.ensure_dir(image_dir)
463 self.generate_new_image(image_dir + recipe_name, None, [], "")
464
465 def ensure_dir(self, directory):
466 self.runCommand(["ensureDir", directory])
467
468 def build_succeeded_async(self):
469 self.building = False
470
471 def build_failed_async(self):
472 self.initcmd = None
473 self.commands_async = []
474 self.building = False
475
476 def cancel_parse(self):
477 self.runCommand(["stateForceShutdown"])
478
479 def cancel_build(self, force=False):
480 if force:
481 # Force the cooker to stop as quickly as possible
482 self.runCommand(["stateForceShutdown"])
483 else:
484            # Wait for tasks to complete before shutting down; this helps
485            # leave the workdir in a usable state
486 self.runCommand(["stateShutdown"])
487
488 def reset_build(self):
489 self.build.reset()
490
491 def get_logfile(self):
492 return self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
493
494 def get_topdir(self):
495 return self.runCommand(["getVariable", "TOPDIR"]) or ""
496
497 def _remove_redundant(self, string):
498 ret = []
499 for i in string.split():
500 if i not in ret:
501 ret.append(i)
502 return " ".join(ret)
503
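# _remove_redundant() de-duplicates a whitespace-separated value while keeping
# first-occurrence order; for example ('handler' stands for any HobHandler
# instance, so this line is illustrative):
fstypes = handler._remove_redundant("tar.bz2 ext3 tar.bz2 jffs2")
# fstypes == "tar.bz2 ext3 jffs2"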
504 def set_var_in_file(self, var, val, default_file=None):
505 self.runCommand(["enableDataTracking"])
506 self.server.runCommand(["setVarFile", var, val, default_file, "set"])
507 self.runCommand(["disableDataTracking"])
508
509 def early_assign_var_in_file(self, var, val, default_file=None):
510 self.runCommand(["enableDataTracking"])
511 self.server.runCommand(["setVarFile", var, val, default_file, "earlyAssign"])
512 self.runCommand(["disableDataTracking"])
513
514 def remove_var_from_file(self, var):
515 self.server.runCommand(["removeVarFile", var])
516
517 def append_var_in_file(self, var, val, default_file=None):
518 self.server.runCommand(["setVarFile", var, val, default_file, "append"])
519
520 def append_to_bbfiles(self, val):
521 bbfiles = self.runCommand(["getVariable", "BBFILES", "False"]) or ""
522 bbfiles = bbfiles.split()
523 if val not in bbfiles:
524 self.append_var_in_file("BBFILES", val, "bblayers.conf")
525
526 def get_parameters(self):
527 # retrieve the parameters from bitbake
528 params = {}
529 params["core_base"] = self.runCommand(["getVariable", "COREBASE"]) or ""
530 params["layer"] = self.runCommand(["getVariable", "BBLAYERS"]) or ""
531 params["layers_non_removable"] = self.runCommand(["getVariable", "BBLAYERS_NON_REMOVABLE"]) or ""
532 params["dldir"] = self.runCommand(["getVariable", "DL_DIR"]) or ""
533 params["machine"] = self.runCommand(["getVariable", "MACHINE"]) or ""
534 params["distro"] = self.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
535 params["pclass"] = self.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
536 params["sstatedir"] = self.runCommand(["getVariable", "SSTATE_DIR"]) or ""
537 params["sstatemirror"] = self.runCommand(["getVariable", "SSTATE_MIRRORS"]) or ""
538
539 num_threads = self.runCommand(["getCpuCount"])
540 if not num_threads:
541 num_threads = 1
542 max_threads = 65536
543 else:
544 try:
545 num_threads = int(num_threads)
546 max_threads = 16 * num_threads
547 except:
548 num_threads = 1
549 max_threads = 65536
550 params["max_threads"] = max_threads
551
552 bbthread = self.runCommand(["getVariable", "BB_NUMBER_THREADS"])
553 if not bbthread:
554 bbthread = num_threads
555 else:
556 try:
557 bbthread = int(bbthread)
558 except:
559 bbthread = num_threads
560 params["bbthread"] = bbthread
561
562 pmake = self.runCommand(["getVariable", "PARALLEL_MAKE"])
563 if not pmake:
564 pmake = num_threads
565 elif isinstance(pmake, int):
566 pass
567 else:
568 try:
569 pmake = int(pmake.lstrip("-j "))
570 except:
571 pmake = num_threads
572 params["pmake"] = "-j %s" % pmake
573
574 params["image_addr"] = self.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""
575
576 image_extra_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
577 if not image_extra_size:
578 image_extra_size = 0
579 else:
580 try:
581 image_extra_size = int(image_extra_size)
582 except:
583 image_extra_size = 0
584 params["image_extra_size"] = image_extra_size
585
586 image_rootfs_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
587 if not image_rootfs_size:
588 image_rootfs_size = 0
589 else:
590 try:
591 image_rootfs_size = int(image_rootfs_size)
592 except:
593 image_rootfs_size = 0
594 params["image_rootfs_size"] = image_rootfs_size
595
596 image_overhead_factor = self.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
597 if not image_overhead_factor:
598 image_overhead_factor = 1
599 else:
600 try:
601 image_overhead_factor = float(image_overhead_factor)
602 except:
603 image_overhead_factor = 1
604 params['image_overhead_factor'] = image_overhead_factor
605
606 params["incompat_license"] = self._remove_redundant(self.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
607 params["sdk_machine"] = self.runCommand(["getVariable", "SDKMACHINE"]) or self.runCommand(["getVariable", "SDK_ARCH"]) or ""
608
609 params["image_fstypes"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")
610
611 params["image_types"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_TYPES"]) or "")
612
613 params["conf_version"] = self.runCommand(["getVariable", "CONF_VERSION"]) or ""
614 params["lconf_version"] = self.runCommand(["getVariable", "LCONF_VERSION"]) or ""
615
616 params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
617 params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
618 params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
619 params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
620 params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
621 params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
622 params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
623 params["target_arch"] = self.runCommand(["getVariable", "TARGET_ARCH"]) or ""
624 params["tune_pkgarch"] = self.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
625 params["bb_version"] = self.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""
626
627 params["default_task"] = self.runCommand(["getVariable", "BB_DEFAULT_TASK"]) or "build"
628
629 params["socks_proxy"] = self.runCommand(["getVariable", "all_proxy"]) or ""
630 params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
631 params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
632 params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""
633
634 params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
635 params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""
636
637 params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
638 params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
639 return params
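A front end drives this handler by connecting to its signals and issuing one of the high-level requests. A hedged sketch of that wiring follows; the server object is the BitBake UI server connection created elsewhere in the UI start-up code, the module path for HobHandler is assumed from the surrounding tree, and the GTK main loop and event delivery are omitted:

    from bb.ui.crumbs.hobeventhandler import HobHandler          # module path assumed
    from bb.ui.crumbs.hoblistmodel import RecipeListModel, PackageListModel

    def on_config_updated(handler, var, values):
        print("%s -> %s" % (var, values))

    def on_command_failed(handler, msg):
        print("command failed: %s" % msg)

    # 'server' is assumed: the BitBake server connection handed to the UI.
    handler = HobHandler(server, RecipeListModel(), PackageListModel())
    handler.connect("config-updated", on_config_updated)
    handler.connect("command-failed", on_command_failed)
    handler.init_cooker()
    handler.generate_configuration()    # queues SUB_PARSE_CONFIG, SUB_PATH_LAYERS, ...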
diff --git a/bitbake/lib/bb/ui/crumbs/hoblistmodel.py b/bitbake/lib/bb/ui/crumbs/hoblistmodel.py
new file mode 100644
index 0000000000..50df156f4d
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hoblistmodel.py
@@ -0,0 +1,903 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2011 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import gobject
25from bb.ui.crumbs.hobpages import HobPage
26
27#
28# PackageListModel
29#
30class PackageListModel(gtk.ListStore):
31 """
32    This class defines a gtk.ListStore subclass which will convert the output
33    of the bb.event.PackageInfo event into a gtk.ListStore whilst also
34 providing convenience functions to access gtk.TreeModel subclasses which
35 provide filtered views of the data.
36 """
37
38 (COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_RCP, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT, COL_FLIST) = range(15)
39
40 __gsignals__ = {
41 "package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
42 gobject.TYPE_NONE,
43 ()),
44 }
45
46 __toolchain_required_packages__ = ["packagegroup-core-standalone-sdk-target", "packagegroup-core-standalone-sdk-target-dbg"]
47
48 def __init__(self):
49 self.rprov_pkg = {}
50 gtk.ListStore.__init__ (self,
51 gobject.TYPE_STRING,
52 gobject.TYPE_STRING,
53 gobject.TYPE_STRING,
54 gobject.TYPE_STRING,
55 gobject.TYPE_STRING,
56 gobject.TYPE_STRING,
57 gobject.TYPE_STRING,
58 gobject.TYPE_STRING,
59 gobject.TYPE_STRING,
60 gobject.TYPE_STRING,
61 gobject.TYPE_STRING,
62 gobject.TYPE_BOOLEAN,
63 gobject.TYPE_BOOLEAN,
64 gobject.TYPE_STRING,
65 gobject.TYPE_STRING)
66 self.sort_column_id, self.sort_order = PackageListModel.COL_NAME, gtk.SORT_ASCENDING
67
68 """
69 Find the model path for the item_name
70 Returns the path in the model or None
71 """
72 def find_path_for_item(self, item_name):
73 pkg = item_name
74 if item_name not in self.pn_path.keys():
75 if item_name not in self.rprov_pkg.keys():
76 return None
77 pkg = self.rprov_pkg[item_name]
78 if pkg not in self.pn_path.keys():
79 return None
80
81 return self.pn_path[pkg]
82
83 def find_item_for_path(self, item_path):
84 return self[item_path][self.COL_NAME]
85
86 """
87    Helper function to determine whether an item matches the given filter
88 """
89 def tree_model_filter(self, model, it, filter):
90 name = model.get_value(it, self.COL_NAME)
91
92 for key in filter.keys():
93 if key == self.COL_NAME:
94 if filter[key] != 'Search packages by name':
95 if name and filter[key] not in name:
96 return False
97 else:
98 if model.get_value(it, key) not in filter[key]:
99 return False
100 self.filtered_nb += 1
101 return True
102
103 """
104 Create, if required, and return a filtered gtk.TreeModelSort
105 containing only the items specified by filter
106 """
107 def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
108 model = self.filter_new()
109 self.filtered_nb = 0
110 model.set_visible_func(self.tree_model_filter, filter)
111
112 sort = gtk.TreeModelSort(model)
113 sort.connect ('sort-column-changed', self.sort_column_changed_cb)
114 if initial:
115 sort.set_sort_column_id(PackageListModel.COL_NAME, gtk.SORT_ASCENDING)
116 sort.set_default_sort_func(None)
117 elif excluded_items_ahead:
118 sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
119 elif included_items_ahead:
120 sort.set_default_sort_func(self.include_item_sort_func, search_data)
121 else:
122 if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
123 sort.set_default_sort_func(self.sort_func, search_data)
124 else:
125 sort.set_sort_column_id(self.sort_column_id, self.sort_order)
126 sort.set_default_sort_func(None)
127
128 sort.set_sort_func(PackageListModel.COL_INC, self.sort_column, PackageListModel.COL_INC)
129 sort.set_sort_func(PackageListModel.COL_SIZE, self.sort_column, PackageListModel.COL_SIZE)
130 sort.set_sort_func(PackageListModel.COL_BINB, self.sort_binb_column)
131 sort.set_sort_func(PackageListModel.COL_RCP, self.sort_column, PackageListModel.COL_RCP)
132 return sort
133
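# The 'filter' argument is a dict keyed by column id: for COL_NAME the value is
# a search substring (the placeholder 'Search packages by name' disables it);
# for any other column it is a collection of accepted values.  A hedged sketch,
# assuming 'pkg_model' is a populated PackageListModel and 'treeview' an
# existing gtk.TreeView:
view_filter = {PackageListModel.COL_INC: [True]}        # only included packages
view_model = pkg_model.tree_model(view_filter, initial=True)
treeview.set_model(view_model)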
134 def sort_column_changed_cb (self, data):
135 self.sort_column_id, self.sort_order = data.get_sort_column_id ()
136
137 def sort_column(self, model, row1, row2, col):
138 value1 = model.get_value(row1, col)
139 value2 = model.get_value(row2, col)
140 if col==PackageListModel.COL_SIZE:
141 value1 = HobPage._string_to_size(value1)
142 value2 = HobPage._string_to_size(value2)
143
144 cmp_res = cmp(value1, value2)
145 if cmp_res!=0:
146 if col==PackageListModel.COL_INC:
147 return -cmp_res
148 else:
149 return cmp_res
150 else:
151 name1 = model.get_value(row1, PackageListModel.COL_NAME)
152 name2 = model.get_value(row2, PackageListModel.COL_NAME)
153 return cmp(name1,name2)
154
155 def sort_binb_column(self, model, row1, row2):
156 value1 = model.get_value(row1, PackageListModel.COL_BINB)
157 value2 = model.get_value(row2, PackageListModel.COL_BINB)
158 value1_list = value1.split(', ')
159 value2_list = value2.split(', ')
160
161 value1 = value1_list[0]
162 value2 = value2_list[0]
163
164 cmp_res = cmp(value1, value2)
165 if cmp_res==0:
166 cmp_size = cmp(len(value1_list), len(value2_list))
167 if cmp_size==0:
168 name1 = model.get_value(row1, PackageListModel.COL_NAME)
169 name2 = model.get_value(row2, PackageListModel.COL_NAME)
170 return cmp(name1,name2)
171 else:
172 return cmp_size
173 else:
174 return cmp_res
175
176 def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
177 if user_data:
178 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
179 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
180 return self.cmp_vals(val1, val2, user_data)
181 else:
182 val1 = model.get_value(iter1, PackageListModel.COL_FADE_INC)
183 val2 = model.get_value(iter2, PackageListModel.COL_INC)
184 return ((val1 == True) and (val2 == False))
185
186 def include_item_sort_func(self, model, iter1, iter2, user_data=None):
187 if user_data:
188 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
189 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
190 return self.cmp_vals(val1, val2, user_data)
191 else:
192 val1 = model.get_value(iter1, PackageListModel.COL_INC)
193 val2 = model.get_value(iter2, PackageListModel.COL_INC)
194 return ((val1 == False) and (val2 == True))
195
196 def sort_func(self, model, iter1, iter2, user_data):
197 val1 = model.get_value(iter1, PackageListModel.COL_NAME)
198 val2 = model.get_value(iter2, PackageListModel.COL_NAME)
199 return self.cmp_vals(val1, val2, user_data)
200
201 def cmp_vals(self, val1, val2, user_data):
202 if val1 is None or val2 is None:
203 return 0
204 elif val1.startswith(user_data) and not val2.startswith(user_data):
205 return -1
206 elif not val1.startswith(user_data) and val2.startswith(user_data):
207 return 1
208 else:
209 return cmp(val1, val2)
210
211 def convert_vpath_to_path(self, view_model, view_path):
212        # view_model is the sorted model
213        # get the corresponding path in the filtered model
214 filtered_model_path = view_model.convert_path_to_child_path(view_path)
215        # get the filtered model
216 filtered_model = view_model.get_model()
217 # get the path of the original model
218 path = filtered_model.convert_path_to_child_path(filtered_model_path)
219 return path
220
221 def convert_path_to_vpath(self, view_model, path):
222 it = view_model.get_iter_first()
223 while it:
224 name = self.find_item_for_path(path)
225 view_name = view_model.get_value(it, PackageListModel.COL_NAME)
226 if view_name == name:
227 view_path = view_model.get_path(it)
228 return view_path
229 it = view_model.iter_next(it)
230 return None
231
232 """
233 The populate() function takes as input the data from a
234 bb.event.PackageInfo event and populates the package list.
235 """
236 def populate(self, pkginfolist):
237 # First clear the model, in case repopulating
238 self.clear()
239
240 def getpkgvalue(pkgdict, key, pkgname, defaultval = None):
241 value = pkgdict.get('%s_%s' % (key, pkgname), None)
242 if not value:
243 value = pkgdict.get(key, defaultval)
244 return value
245
246 for pkginfo in pkginfolist:
247 pn = pkginfo['PN']
248 pv = pkginfo['PV']
249 pr = pkginfo['PR']
250 pkg = pkginfo['PKG']
251 pkgv = getpkgvalue(pkginfo, 'PKGV', pkg)
252 pkgr = getpkgvalue(pkginfo, 'PKGR', pkg)
253            # PKGSIZE is artificial; it is always stored with the package-name suffix (PKGSIZE_<pkg>)
254 pkgsize = int(pkginfo.get('PKGSIZE_%s' % pkg, "0"))
255 # PKG_%s is the renamed version
256 pkg_rename = pkginfo.get('PKG_%s' % pkg, "")
257 # The rest may be overridden or not
258 section = getpkgvalue(pkginfo, 'SECTION', pkg, "")
259 summary = getpkgvalue(pkginfo, 'SUMMARY', pkg, "")
260 rdep = getpkgvalue(pkginfo, 'RDEPENDS', pkg, "")
261 rrec = getpkgvalue(pkginfo, 'RRECOMMENDS', pkg, "")
262 rprov = getpkgvalue(pkginfo, 'RPROVIDES', pkg, "")
263 files_list = getpkgvalue(pkginfo, 'FILES_INFO', pkg, "")
264 for i in rprov.split():
265 self.rprov_pkg[i] = pkg
266
267 recipe = pn + '-' + pv + '-' + pr
268
269 allow_empty = getpkgvalue(pkginfo, 'ALLOW_EMPTY', pkg, "")
270
271 if pkgsize == 0 and not allow_empty:
272 continue
273
274 size = HobPage._size_to_string(pkgsize)
275 self.set(self.append(), self.COL_NAME, pkg, self.COL_VER, pkgv,
276 self.COL_REV, pkgr, self.COL_RNM, pkg_rename,
277 self.COL_SEC, section, self.COL_SUM, summary,
278 self.COL_RDEP, rdep + ' ' + rrec,
279 self.COL_RPROV, rprov, self.COL_SIZE, size,
280 self.COL_RCP, recipe, self.COL_BINB, "",
281 self.COL_INC, False, self.COL_FONT, '10', self.COL_FLIST, files_list)
282
283 self.pn_path = {}
284 it = self.get_iter_first()
285 while it:
286 pn = self.get_value(it, self.COL_NAME)
287 path = self.get_path(it)
288 self.pn_path[pn] = path
289 it = self.iter_next(it)
290
291 """
292 Update the model, send out the notification.
293 """
294 def selection_change_notification(self):
295 self.emit("package-selection-changed")
296
297 """
298 Check whether the item at item_path is included or not
299 """
300 def path_included(self, item_path):
301 return self[item_path][self.COL_INC]
302
303 """
304 Add this item, and any of its dependencies, to the image contents
305 """
306 def include_item(self, item_path, binb=""):
307 if self.path_included(item_path):
308 return
309
310 item_name = self[item_path][self.COL_NAME]
311 item_deps = self[item_path][self.COL_RDEP]
312
313 self[item_path][self.COL_INC] = True
314
315 item_bin = self[item_path][self.COL_BINB].split(', ')
316 if binb and not binb in item_bin:
317 item_bin.append(binb)
318 self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
319
320 if item_deps:
321            # Ensure all of the item's deps are included and, where appropriate,
322 # add this item to their COL_BINB
323 for dep in item_deps.split(" "):
324 if dep.startswith('('):
325 continue
326 # If the contents model doesn't already contain dep, add it
327 dep_path = self.find_path_for_item(dep)
328 if not dep_path:
329 continue
330 dep_included = self.path_included(dep_path)
331
332 if dep_included and not dep in item_bin:
333 # don't set the COL_BINB to this item if the target is an
334 # item in our own COL_BINB
335 dep_bin = self[dep_path][self.COL_BINB].split(', ')
336 if not item_name in dep_bin:
337 dep_bin.append(item_name)
338 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
339 elif not dep_included:
340 self.include_item(dep_path, binb=item_name)
341
342 def exclude_item(self, item_path):
343 if not self.path_included(item_path):
344 return
345
346 self[item_path][self.COL_INC] = False
347
348 item_name = self[item_path][self.COL_NAME]
349 item_deps = self[item_path][self.COL_RDEP]
350 if item_deps:
351 for dep in item_deps.split(" "):
352 if dep.startswith('('):
353 continue
354 dep_path = self.find_path_for_item(dep)
355 if not dep_path:
356 continue
357 dep_bin = self[dep_path][self.COL_BINB].split(', ')
358 if item_name in dep_bin:
359 dep_bin.remove(item_name)
360 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
361
362 item_bin = self[item_path][self.COL_BINB].split(', ')
363 if item_bin:
364 for binb in item_bin:
365 binb_path = self.find_path_for_item(binb)
366 if not binb_path:
367 continue
368 self.exclude_item(binb_path)
369
370 """
371    Reset the model by setting the include flag of each entry to False
372 """
373 def reset(self):
374 it = self.get_iter_first()
375 while it:
376 self.set(it,
377 self.COL_INC, False,
378 self.COL_BINB, "")
379 it = self.iter_next(it)
380
381 self.selection_change_notification()
382
383 def get_selected_packages(self):
384 packagelist = []
385
386 it = self.get_iter_first()
387 while it:
388 if self.get_value(it, self.COL_INC):
389 name = self.get_value(it, self.COL_NAME)
390 packagelist.append(name)
391 it = self.iter_next(it)
392
393 return packagelist
394
395 def get_user_selected_packages(self):
396 packagelist = []
397
398 it = self.get_iter_first()
399 while it:
400 if self.get_value(it, self.COL_INC):
401 binb = self.get_value(it, self.COL_BINB)
402 if binb == "User Selected":
403 name = self.get_value(it, self.COL_NAME)
404 packagelist.append(name)
405 it = self.iter_next(it)
406
407 return packagelist
408
409 def get_selected_packages_toolchain(self):
410 packagelist = []
411
412 it = self.get_iter_first()
413 while it:
414 if self.get_value(it, self.COL_INC):
415 name = self.get_value(it, self.COL_NAME)
416 if name.endswith("-dev") or name.endswith("-dbg"):
417 packagelist.append(name)
418 it = self.iter_next(it)
419
420        return list(set(packagelist + self.__toolchain_required_packages__))
421
422 """
423    The package model may be incomplete, so when set_selected_packages()
424    is called some packages may not be marked as included.
425    Return the list of packages that could not be selected.
426 """
427 def set_selected_packages(self, packagelist, user_selected=False):
428 left = []
429 binb = 'User Selected' if user_selected else ''
430 for pn in packagelist:
431 if pn in self.pn_path.keys():
432 path = self.pn_path[pn]
433 self.include_item(item_path=path, binb=binb)
434 else:
435 left.append(pn)
436
437 self.selection_change_notification()
438 return left
439
440 """
441    Return the total size of the selected packages, in bytes.
442 """
443 def get_packages_size(self):
444 packages_size = 0
445 it = self.get_iter_first()
446 while it:
447 if self.get_value(it, self.COL_INC):
448 str_size = self.get_value(it, self.COL_SIZE)
449 if not str_size:
450 continue
451
452 packages_size += HobPage._string_to_size(str_size)
453
454 it = self.iter_next(it)
455 return packages_size
456
457 """
458    Resync the state of included items to a backup column before performing the fade-out visual effect
459 """
460 def resync_fadeout_column(self, model_first_iter=None):
461 it = model_first_iter
462 while it:
463 active = self.get_value(it, self.COL_INC)
464 self.set(it, self.COL_FADE_INC, active)
465 it = self.iter_next(it)
466
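# A minimal, hedged illustration of feeding populate() and reading selections
# back.  The dict keys mirror what a bb.event.PackageInfo entry carries for one
# package; the values are made up, and a working PyGTK environment is assumed.
from bb.ui.crumbs.hoblistmodel import PackageListModel

pkginfolist = [{
    "PN": "dropbear", "PV": "2012.55", "PR": "r0", "PKG": "dropbear",
    "PKGV": "2012.55", "PKGR": "r0",
    "PKGSIZE_dropbear": "163840",        # bytes; size 0 without ALLOW_EMPTY is skipped
    "SECTION": "console/network", "SUMMARY": "A lightweight SSH server",
    "RDEPENDS": "", "RRECOMMENDS": "", "RPROVIDES": "", "FILES_INFO": "",
}]

model = PackageListModel()
model.populate(pkginfolist)
left = model.set_selected_packages(["dropbear"], user_selected=True)   # left == []
print(model.get_selected_packages())   # ['dropbear']
print(model.get_packages_size())       # 163840.0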
467#
468# RecipeListModel
469#
470class RecipeListModel(gtk.ListStore):
471 """
472    This class defines a gtk.ListStore subclass which will convert the output
473 of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
474 providing convenience functions to access gtk.TreeModel subclasses which
475 provide filtered views of the data.
476 """
477 (COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC, COL_SUMMARY, COL_VERSION,
478 COL_REVISION, COL_HOMEPAGE, COL_BUGTRACKER, COL_FILE) = range(18)
479
480 __custom_image__ = "Start with an empty image recipe"
481
482 __gsignals__ = {
483 "recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
484 gobject.TYPE_NONE,
485 ()),
486 }
487
488 """
489 """
490 def __init__(self):
491 gtk.ListStore.__init__ (self,
492 gobject.TYPE_STRING,
493 gobject.TYPE_STRING,
494 gobject.TYPE_STRING,
495 gobject.TYPE_STRING,
496 gobject.TYPE_STRING,
497 gobject.TYPE_STRING,
498 gobject.TYPE_STRING,
499 gobject.TYPE_BOOLEAN,
500 gobject.TYPE_BOOLEAN,
501 gobject.TYPE_STRING,
502 gobject.TYPE_STRING,
503 gobject.TYPE_BOOLEAN,
504 gobject.TYPE_STRING,
505 gobject.TYPE_STRING,
506 gobject.TYPE_STRING,
507 gobject.TYPE_STRING,
508 gobject.TYPE_STRING,
509 gobject.TYPE_STRING)
510 self.sort_column_id, self.sort_order = RecipeListModel.COL_NAME, gtk.SORT_ASCENDING
511
512 """
513 Find the model path for the item_name
514 Returns the path in the model or None
515 """
516 def find_path_for_item(self, item_name):
517 if self.non_target_name(item_name) or item_name not in self.pn_path.keys():
518 return None
519 else:
520 return self.pn_path[item_name]
521
522 def find_item_for_path(self, item_path):
523 return self[item_path][self.COL_NAME]
524
525 """
526 Helper method to determine whether name is a target pn
527 """
528 def non_target_name(self, name):
529 if name and ('-native' in name):
530 return True
531 return False
532
533 """
534    Helper function to determine whether an item matches the given filter
535 """
536 def tree_model_filter(self, model, it, filter):
537 name = model.get_value(it, self.COL_NAME)
538 if self.non_target_name(name):
539 return False
540
541 for key in filter.keys():
542 if key == self.COL_NAME:
543 if filter[key] != 'Search recipes by name' and filter[key] != 'Search package groups by name':
544 if filter[key] not in name:
545 return False
546 else:
547 if model.get_value(it, key) not in filter[key]:
548 return False
549 self.filtered_nb += 1
550
551 return True
552
553 def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
554 if user_data:
555 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
556 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
557 return self.cmp_vals(val1, val2, user_data)
558 else:
559 val1 = model.get_value(iter1, RecipeListModel.COL_FADE_INC)
560 val2 = model.get_value(iter2, RecipeListModel.COL_INC)
561 return ((val1 == True) and (val2 == False))
562
563 def include_item_sort_func(self, model, iter1, iter2, user_data=None):
564 if user_data:
565 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
566 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
567 return self.cmp_vals(val1, val2, user_data)
568 else:
569 val1 = model.get_value(iter1, RecipeListModel.COL_INC)
570 val2 = model.get_value(iter2, RecipeListModel.COL_INC)
571 return ((val1 == False) and (val2 == True))
572
573 def sort_func(self, model, iter1, iter2, user_data):
574 val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
575 val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
576 return self.cmp_vals(val1, val2, user_data)
577
578 def cmp_vals(self, val1, val2, user_data):
579 if val1 is None or val2 is None:
580 return 0
581 elif val1.startswith(user_data) and not val2.startswith(user_data):
582 return -1
583 elif not val1.startswith(user_data) and val2.startswith(user_data):
584 return 1
585 else:
586 return cmp(val1, val2)
587
588 """
589 Create, if required, and return a filtered gtk.TreeModelSort
590 containing only the items specified by filter
591 """
592 def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
593 model = self.filter_new()
594 self.filtered_nb = 0
595 model.set_visible_func(self.tree_model_filter, filter)
596
597 sort = gtk.TreeModelSort(model)
598 sort.connect ('sort-column-changed', self.sort_column_changed_cb)
599 if initial:
600 sort.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)
601 sort.set_default_sort_func(None)
602 elif excluded_items_ahead:
603 sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
604 elif included_items_ahead:
605 sort.set_default_sort_func(self.include_item_sort_func, search_data)
606 else:
607 if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
608 sort.set_default_sort_func(self.sort_func, search_data)
609 else:
610 sort.set_sort_column_id(self.sort_column_id, self.sort_order)
611 sort.set_default_sort_func(None)
612
613 sort.set_sort_func(RecipeListModel.COL_INC, self.sort_column, RecipeListModel.COL_INC)
614 sort.set_sort_func(RecipeListModel.COL_GROUP, self.sort_column, RecipeListModel.COL_GROUP)
615 sort.set_sort_func(RecipeListModel.COL_BINB, self.sort_binb_column)
616 sort.set_sort_func(RecipeListModel.COL_LIC, self.sort_column, RecipeListModel.COL_LIC)
617 return sort
618
619 def sort_column_changed_cb (self, data):
620 self.sort_column_id, self.sort_order = data.get_sort_column_id ()
621
622 def sort_column(self, model, row1, row2, col):
623 value1 = model.get_value(row1, col)
624 value2 = model.get_value(row2, col)
625 cmp_res = cmp(value1, value2)
626 if cmp_res!=0:
627 if col==RecipeListModel.COL_INC:
628 return -cmp_res
629 else:
630 return cmp_res
631 else:
632 name1 = model.get_value(row1, RecipeListModel.COL_NAME)
633 name2 = model.get_value(row2, RecipeListModel.COL_NAME)
634 return cmp(name1,name2)
635
636 def sort_binb_column(self, model, row1, row2):
637 value1 = model.get_value(row1, RecipeListModel.COL_BINB)
638 value2 = model.get_value(row2, RecipeListModel.COL_BINB)
639 value1_list = value1.split(', ')
640 value2_list = value2.split(', ')
641
642 value1 = value1_list[0]
643 value2 = value2_list[0]
644
645 cmp_res = cmp(value1, value2)
646 if cmp_res==0:
647 cmp_size = cmp(len(value1_list), len(value2_list))
648 if cmp_size==0:
649 name1 = model.get_value(row1, RecipeListModel.COL_NAME)
650 name2 = model.get_value(row2, RecipeListModel.COL_NAME)
651 return cmp(name1,name2)
652 else:
653 return cmp_size
654 else:
655 return cmp_res
656
657 def convert_vpath_to_path(self, view_model, view_path):
658 filtered_model_path = view_model.convert_path_to_child_path(view_path)
659 filtered_model = view_model.get_model()
660
661 # get the path of the original model
662 path = filtered_model.convert_path_to_child_path(filtered_model_path)
663 return path
664
665 def convert_path_to_vpath(self, view_model, path):
666 it = view_model.get_iter_first()
667 while it:
668 name = self.find_item_for_path(path)
669 view_name = view_model.get_value(it, RecipeListModel.COL_NAME)
670 if view_name == name:
671 view_path = view_model.get_path(it)
672 return view_path
673 it = view_model.iter_next(it)
674 return None
675
676 """
677 The populate() function takes as input the data from a
678 bb.event.TargetsTreeGenerated event and populates the RecipeList.
679 """
680 def populate(self, event_model):
681 # First clear the model, in case repopulating
682 self.clear()
683
684 # dummy image for prompt
685 self.set_in_list(self.__custom_image__, "Use 'Edit image recipe' to customize recipes and packages " \
686 "to be included in your image ")
687
688 for item in event_model["pn"]:
689 name = item
690 desc = event_model["pn"][item]["description"]
691 lic = event_model["pn"][item]["license"]
692 group = event_model["pn"][item]["section"]
693 inherits = event_model["pn"][item]["inherits"]
694 summary = event_model["pn"][item]["summary"]
695 version = event_model["pn"][item]["version"]
696 revision = event_model["pn"][item]["prevision"]
697 homepage = event_model["pn"][item]["homepage"]
698 bugtracker = event_model["pn"][item]["bugtracker"]
699 filename = event_model["pn"][item]["filename"]
700 install = []
701
702 depends = event_model["depends"].get(item, []) + event_model["rdepends-pn"].get(item, [])
703
704 if ('packagegroup.bbclass' in " ".join(inherits)):
705 atype = 'packagegroup'
706 elif ('/image.bbclass' in " ".join(inherits)):
707 if "edited" not in name:
708 atype = 'image'
709 install = event_model["rdepends-pkg"].get(item, []) + event_model["rrecs-pkg"].get(item, [])
710 elif ('meta-' in name):
711 atype = 'toolchain'
712 elif (name == 'dummy-image' or name == 'dummy-toolchain'):
713 atype = 'dummy'
714 else:
715 atype = 'recipe'
716
717 self.set(self.append(), self.COL_NAME, item, self.COL_DESC, desc,
718 self.COL_LIC, lic, self.COL_GROUP, group,
719 self.COL_DEPS, " ".join(depends), self.COL_BINB, "",
720 self.COL_TYPE, atype, self.COL_INC, False,
721 self.COL_IMG, False, self.COL_INSTALL, " ".join(install), self.COL_PN, item,
722 self.COL_SUMMARY, summary, self.COL_VERSION, version, self.COL_REVISION, revision,
723 self.COL_HOMEPAGE, homepage, self.COL_BUGTRACKER, bugtracker,
724 self.COL_FILE, filename)
725
726 self.pn_path = {}
727 it = self.get_iter_first()
728 while it:
729 pn = self.get_value(it, self.COL_NAME)
730 path = self.get_path(it)
731 self.pn_path[pn] = path
732 it = self.iter_next(it)
733
734 def set_in_list(self, item, desc):
735 self.set(self.append(), self.COL_NAME, item,
736 self.COL_DESC, desc,
737 self.COL_LIC, "", self.COL_GROUP, "",
738 self.COL_DEPS, "", self.COL_BINB, "",
739 self.COL_TYPE, "image", self.COL_INC, False,
740 self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, item,
741 self.COL_SUMMARY, "", self.COL_VERSION, "", self.COL_REVISION, "",
742 self.COL_HOMEPAGE, "", self.COL_BUGTRACKER, "")
743 self.pn_path = {}
744 it = self.get_iter_first()
745 while it:
746 pn = self.get_value(it, self.COL_NAME)
747 path = self.get_path(it)
748 self.pn_path[pn] = path
749 it = self.iter_next(it)
750
751 """
752 Update the model, send out the notification.
753 """
754 def selection_change_notification(self):
755 self.emit("recipe-selection-changed")
756
757 def path_included(self, item_path):
758 return self[item_path][self.COL_INC]
759
760 """
761 Add this item, and any of its dependencies, to the image contents
762 """
763 def include_item(self, item_path, binb="", image_contents=False):
764 if self.path_included(item_path):
765 return
766
767 item_name = self[item_path][self.COL_NAME]
768 item_deps = self[item_path][self.COL_DEPS]
769
770 self[item_path][self.COL_INC] = True
771
772 item_bin = self[item_path][self.COL_BINB].split(', ')
773 if binb and not binb in item_bin:
774 item_bin.append(binb)
775 self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
776
777 # We want to do some magic with things which are brought in by the
778        # base image, so tag them as such
779 if image_contents:
780 self[item_path][self.COL_IMG] = True
781
782 if item_deps:
783            # Ensure all of the item's deps are included and, where appropriate,
784 # add this item to their COL_BINB
785 for dep in item_deps.split(" "):
786 # If the contents model doesn't already contain dep, add it
787 dep_path = self.find_path_for_item(dep)
788 if not dep_path:
789 continue
790 dep_included = self.path_included(dep_path)
791
792 if dep_included and not dep in item_bin:
793 # don't set the COL_BINB to this item if the target is an
794 # item in our own COL_BINB
795 dep_bin = self[dep_path][self.COL_BINB].split(', ')
796 if not item_name in dep_bin:
797 dep_bin.append(item_name)
798 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
799 elif not dep_included:
800 self.include_item(dep_path, binb=item_name, image_contents=image_contents)
801 dep_bin = self[item_path][self.COL_BINB].split(', ')
802 if self[item_path][self.COL_NAME] in dep_bin:
803 dep_bin.remove(self[item_path][self.COL_NAME])
804 self[item_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
805
806 def exclude_item(self, item_path):
807 if not self.path_included(item_path):
808 return
809
810 self[item_path][self.COL_INC] = False
811
812 item_name = self[item_path][self.COL_NAME]
813 item_deps = self[item_path][self.COL_DEPS]
814 if item_deps:
815 for dep in item_deps.split(" "):
816 dep_path = self.find_path_for_item(dep)
817 if not dep_path:
818 continue
819 dep_bin = self[dep_path][self.COL_BINB].split(', ')
820 if item_name in dep_bin:
821 dep_bin.remove(item_name)
822 self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
823
824 item_bin = self[item_path][self.COL_BINB].split(', ')
825 if item_bin:
826 for binb in item_bin:
827 binb_path = self.find_path_for_item(binb)
828 if not binb_path:
829 continue
830 self.exclude_item(binb_path)
831
832 def reset(self):
833 it = self.get_iter_first()
834 while it:
835 self.set(it,
836 self.COL_INC, False,
837 self.COL_BINB, "",
838 self.COL_IMG, False)
839 it = self.iter_next(it)
840
841 self.selection_change_notification()
842
843 """
844 Returns two lists. One of user selected recipes and the other containing
845 all selected recipes
846 """
847 def get_selected_recipes(self):
848 allrecipes = []
849 userrecipes = []
850
851 it = self.get_iter_first()
852 while it:
853 if self.get_value(it, self.COL_INC):
854 name = self.get_value(it, self.COL_PN)
855 type = self.get_value(it, self.COL_TYPE)
856 if type != "image":
857 allrecipes.append(name)
858 sel = "User Selected" in self.get_value(it, self.COL_BINB)
859 if sel:
860 userrecipes.append(name)
861 it = self.iter_next(it)
862
863 return list(set(userrecipes)), list(set(allrecipes))
864
865 def set_selected_recipes(self, recipelist):
866 for pn in recipelist:
867 if pn in self.pn_path.keys():
868 path = self.pn_path[pn]
869 self.include_item(item_path=path,
870 binb="User Selected")
871 self.selection_change_notification()
872
873 def get_selected_image(self):
874 it = self.get_iter_first()
875 while it:
876 if self.get_value(it, self.COL_INC):
877 name = self.get_value(it, self.COL_PN)
878 type = self.get_value(it, self.COL_TYPE)
879 if type == "image":
880 sel = "User Selected" in self.get_value(it, self.COL_BINB)
881 if sel:
882 return name
883 it = self.iter_next(it)
884 return None
885
886 def set_selected_image(self, img):
887 if not img:
888 return
889 self.reset()
890 path = self.find_path_for_item(img)
891 self.include_item(item_path=path,
892 binb="User Selected",
893 image_contents=True)
894 self.selection_change_notification()
895
896 def set_custom_image_version(self, version):
897 self.custom_image_version = version
898
899 def get_custom_image_version(self):
900 return self.custom_image_version
901
902 def is_custom_image(self):
903 return self.get_selected_image() == self.__custom_image__
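RecipeListModel is populated from the bb.event.TargetsTreeGenerated model (see HobHandler.handle_event() above); once populated, image and recipe selection reuse the same include/exclude mechanics as the package model. A hedged sketch of the selection side, assuming recipe_model is a RecipeListModel that has already been populated from a real build tree (the image name is illustrative):

    recipe_model.set_selected_image("core-image-minimal")
    user_recipes, all_recipes = recipe_model.get_selected_recipes()
    print(recipe_model.get_selected_image())   # 'core-image-minimal'
    print(recipe_model.is_custom_image())      # False unless the dummy custom image was chosen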
diff --git a/bitbake/lib/bb/ui/crumbs/hobpages.py b/bitbake/lib/bb/ui/crumbs/hobpages.py
new file mode 100755
index 0000000000..0fd3598c3a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobpages.py
@@ -0,0 +1,128 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24from bb.ui.crumbs.hobcolor import HobColors
25from bb.ui.crumbs.hobwidget import hwc
26
27#
28# HobPage: the super class for all Hob-related pages
29#
30class HobPage (gtk.VBox):
31
32 def __init__(self, builder, title = None):
33 super(HobPage, self).__init__(False, 0)
34 self.builder = builder
35 self.builder_width, self.builder_height = self.builder.size_request()
36
37 if not title:
38 self.title = "Hob -- Image Creator"
39 else:
40 self.title = title
41 self.title_label = gtk.Label()
42
43 self.box_group_area = gtk.VBox(False, 12)
44 self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
45 self.group_align = gtk.Alignment(xalign = 0, yalign=0.5, xscale=1, yscale=1)
46 self.group_align.set_padding(15, 15, 73, 73)
47 self.group_align.add(self.box_group_area)
48 self.box_group_area.set_homogeneous(False)
49
50 def set_title(self, title):
51 self.title = title
52 self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
53
54 def add_onto_top_bar(self, widget = None, padding = 0):
55 # the top button occupies 1/7 of the page height
56 # setup an event box
57 eventbox = gtk.EventBox()
58 style = eventbox.get_style().copy()
59 style.bg[gtk.STATE_NORMAL] = eventbox.get_colormap().alloc_color(HobColors.LIGHT_GRAY, False, False)
60 eventbox.set_style(style)
61 eventbox.set_size_request(-1, 88)
62
63 hbox = gtk.HBox()
64
65 self.title_label = gtk.Label()
66 self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
67 hbox.pack_start(self.title_label, expand=False, fill=False, padding=20)
68
69 if widget:
70 # add the widget in the event box
71 hbox.pack_end(widget, expand=False, fill=False, padding=padding)
72 eventbox.add(hbox)
73
74 return eventbox
75
76 def span_tag(self, size="medium", weight="normal", forground="#1c1c1c"):
77 span_tag = "weight='%s' foreground='%s' size='%s'" % (weight, forground, size)
78 return span_tag
79
80 def append_toolbar_button(self, toolbar, buttonname, icon_disp, icon_hovor, tip, cb):
81 # Create a button and append it on the toolbar according to button name
82 icon = gtk.Image()
83 icon_display = icon_disp
84 icon_hover = icon_hovor
85 pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_display)
86 icon.set_from_pixbuf(pix_buffer)
87 tip_text = tip
88 button = toolbar.append_item(buttonname, tip, None, icon, cb)
89 return button
90
91 @staticmethod
92 def _size_to_string(size):
93 try:
94 if not size:
95 size_str = "0 B"
96 else:
97 if len(str(int(size))) > 6:
98 size_str = '%.1f' % (size*1.0/(1024*1024)) + ' MB'
99 elif len(str(int(size))) > 3:
100 size_str = '%.1f' % (size*1.0/1024) + ' KB'
101 else:
102 size_str = str(size) + ' B'
103 except:
104 size_str = "0 B"
105 return size_str
106
107 @staticmethod
108 def _string_to_size(str_size):
109 try:
110 if not str_size:
111 size = 0
112 else:
113 unit = str_size.split()
114 if len(unit) > 1:
115 if unit[1] == 'MB':
116 size = float(unit[0])*1024*1024
117 elif unit[1] == 'KB':
118 size = float(unit[0])*1024
119 elif unit[1] == 'B':
120 size = float(unit[0])
121 else:
122 size = 0
123 else:
124 size = float(unit[0])
125 except:
126 size = 0
127 return size
128
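HobPage's two static size helpers convert between byte counts and the human-readable strings stored in the models' size columns; they are approximate inverses, since the string form rounds to one decimal place. A short example, assuming a working PyGTK environment so the module imports cleanly:

    from bb.ui.crumbs.hobpages import HobPage

    print(HobPage._size_to_string(2048))       # '2.0 KB'
    print(HobPage._size_to_string(3000000))    # '2.9 MB'
    print(HobPage._string_to_size("2.0 KB"))   # 2048.0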
diff --git a/bitbake/lib/bb/ui/crumbs/hobwidget.py b/bitbake/lib/bb/ui/crumbs/hobwidget.py
new file mode 100644
index 0000000000..2b969c146e
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/hobwidget.py
@@ -0,0 +1,904 @@
1# BitBake Graphical GTK User Interface
2#
3# Copyright (C) 2011-2012 Intel Corporation
4#
5# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
6# Authored by Shane Wang <shane.wang@intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20import gtk
21import gobject
22import os
23import os.path
24import sys
25import pango, pangocairo
26import cairo
27import math
28
29from bb.ui.crumbs.hobcolor import HobColors
30from bb.ui.crumbs.persistenttooltip import PersistentTooltip
31
32class hwc:
33
34 MAIN_WIN_WIDTH = 1024
35 MAIN_WIN_HEIGHT = 700
36
37class hic:
38
39 HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
40
41 ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
42 ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
43 ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
44 ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
45 ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
46 ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
47 ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
48 ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
49 ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
50 ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
51 ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
52 ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
53 ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
54 ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
55 ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
56 ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
57 ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
58 ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
59 ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
60 ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
61 ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
62 ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))
63
64class HobViewTable (gtk.VBox):
65 """
66 A VBox to contain the table for different recipe views and package view
67 """
68 __gsignals__ = {
69 "toggled" : (gobject.SIGNAL_RUN_LAST,
70 gobject.TYPE_NONE,
71 (gobject.TYPE_PYOBJECT,
72 gobject.TYPE_STRING,
73 gobject.TYPE_INT,
74 gobject.TYPE_PYOBJECT,)),
75 "row-activated" : (gobject.SIGNAL_RUN_LAST,
76 gobject.TYPE_NONE,
77 (gobject.TYPE_PYOBJECT,
78 gobject.TYPE_PYOBJECT,)),
79 "cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
80 gobject.TYPE_NONE,
81 (gobject.TYPE_PYOBJECT,
82 gobject.TYPE_PYOBJECT,
83 gobject.TYPE_PYOBJECT,)),
84 }
85
86 def __init__(self, columns, name):
87 gtk.VBox.__init__(self, False, 6)
88 self.table_tree = gtk.TreeView()
89 self.table_tree.set_headers_visible(True)
90 self.table_tree.set_headers_clickable(True)
91 self.table_tree.set_rules_hint(True)
92 self.table_tree.set_enable_tree_lines(True)
93 self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
94 self.toggle_columns = []
95 self.table_tree.connect("row-activated", self.row_activated_cb)
96 self.top_bar = None
97 self.tab_name = name
98
99 for i, column in enumerate(columns):
100 col_name = column['col_name']
101 col = gtk.TreeViewColumn(col_name)
102 col.set_clickable(True)
103 col.set_resizable(True)
104 if self.tab_name.startswith('Included'):
105 if col_name!='Included':
106 col.set_sort_column_id(column['col_id'])
107 else:
108 col.set_sort_column_id(column['col_id'])
109 if 'col_min' in column.keys():
110 col.set_min_width(column['col_min'])
111 if 'col_max' in column.keys():
112 col.set_max_width(column['col_max'])
113 if 'expand' in column.keys():
114 col.set_expand(True)
115 self.table_tree.append_column(col)
116
117 if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
118 cell = gtk.CellRendererText()
119 col.pack_start(cell, True)
120 col.set_attributes(cell, text=column['col_id'])
121 if 'col_t_id' in column.keys():
122 col.add_attribute(cell, 'font', column['col_t_id'])
123 elif column['col_style'] == 'check toggle':
124 cell = HobCellRendererToggle()
125 cell.set_property('activatable', True)
126 cell.connect("toggled", self.toggled_cb, i, self.table_tree)
127 cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
128 self.toggle_id = i
129 col.pack_end(cell, True)
130 col.set_attributes(cell, active=column['col_id'])
131 self.toggle_columns.append(col_name)
132 if 'col_group' in column.keys():
133 col.set_cell_data_func(cell, self.set_group_number_cb)
134 elif column['col_style'] == 'radio toggle':
135 cell = gtk.CellRendererToggle()
136 cell.set_property('activatable', True)
137 cell.set_radio(True)
138 cell.connect("toggled", self.toggled_cb, i, self.table_tree)
139 self.toggle_id = i
140 col.pack_end(cell, True)
141 col.set_attributes(cell, active=column['col_id'])
142 self.toggle_columns.append(col_name)
143 elif column['col_style'] == 'binb':
144 cell = gtk.CellRendererText()
145 col.pack_start(cell, True)
146 col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
147 if 'col_t_id' in column.keys():
148 col.add_attribute(cell, 'font', column['col_t_id'])
149
150 self.scroll = gtk.ScrolledWindow()
151 self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
152 self.scroll.add(self.table_tree)
153
154 self.pack_end(self.scroll, True, True, 0)
155
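    # Illustrative sketch (not part of the original source): the 'columns' argument
    # consumed by __init__ above is a list of dicts. 'col_name', 'col_id' and
    # 'col_style' select the renderer; 'col_min', 'col_max', 'expand', 'col_t_id'
    # and 'col_group' are optional. A hypothetical caller might pass:
    #
    #   columns = [
    #       {'col_name': 'Included', 'col_id': 1, 'col_style': 'check toggle'},
    #       {'col_name': 'Name', 'col_id': 0, 'col_style': 'text',
    #        'col_min': 100, 'col_max': 400},
    #       {'col_name': 'Brought in by', 'col_id': 2, 'col_style': 'binb'},
    #   ]
    #   table = HobViewTable(columns, "Included recipes")
    #   table.set_model(tree_model)   # tree_model: an existing gtk.TreeModel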
156 def add_no_result_bar(self, entry):
157 color = HobColors.KHAKI
158 self.top_bar = gtk.EventBox()
159 self.top_bar.set_size_request(-1, 70)
160 self.top_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
161 self.top_bar.set_flags(gtk.CAN_DEFAULT)
162 self.top_bar.grab_default()
163
164 no_result_tab = gtk.Table(5, 20, True)
165 self.top_bar.add(no_result_tab)
166
167 label = gtk.Label()
168 label.set_alignment(0.0, 0.5)
169 title = "No results matching your search"
170 label.set_markup("<span size='x-large'><b>%s</b></span>" % title)
171 no_result_tab.attach(label, 1, 14, 1, 4)
172
173 clear_button = HobButton("Clear search")
174 clear_button.set_tooltip_text("Clear search query")
175 clear_button.connect('clicked', self.set_search_entry_clear_cb, entry)
176 no_result_tab.attach(clear_button, 16, 19, 1, 4)
177
178 self.pack_start(self.top_bar, False, True, 12)
179 self.top_bar.show_all()
180
181 def set_search_entry_clear_cb(self, button, search):
182 if search.get_editable() == True:
183 search.set_text("")
184 search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
185 search.grab_focus()
186
187 def display_binb_cb(self, col, cell, model, it, col_id):
188 binb = model.get_value(it, col_id)
189 # Just display the first item
190 if binb:
191 bin = binb.split(', ')
192 total_no = len(bin)
193 if total_no > 1 and bin[0] == "User Selected":
194 if total_no > 2:
195 present_binb = bin[1] + ' (+' + str(total_no - 1) + ')'
196 else:
197 present_binb = bin[1]
198 else:
199 if total_no > 1:
200 present_binb = bin[0] + ' (+' + str(total_no - 1) + ')'
201 else:
202 present_binb = bin[0]
203 cell.set_property('text', present_binb)
204 else:
205 cell.set_property('text', "")
206 return True
207
208 def set_model(self, tree_model):
209 self.table_tree.set_model(tree_model)
210
211 def toggle_default(self):
212 model = self.table_tree.get_model()
213 if not model:
214 return
215 iter = model.get_iter_first()
216 if iter:
217 rowpath = model.get_path(iter)
218 model[rowpath][self.toggle_id] = True
219
220 def toggled_cb(self, cell, path, columnid, tree):
221 self.emit("toggled", cell, path, columnid, tree)
222
223 def row_activated_cb(self, tree, path, view_column):
224 if not view_column.get_title() in self.toggle_columns:
225 self.emit("row-activated", tree.get_model(), path)
226
227 def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
228 self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
229
230 def set_group_number_cb(self, col, cell, model, iter):
231 if model and (model.iter_parent(iter) == None):
232 cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
233 else:
234 cell.cell_attr["number_of_children"] = 0
235
236 def connect_group_selection(self, cb_func):
237 self.table_tree.get_selection().connect("changed", cb_func)
238
239"""
240A method to calculate a softened value for the colour of a widget in the
241provided state.
242
243widget: the widget whose style to use
244state: the state of the widget to use the style for
245
246Returns a string value representing the softened colour
247"""
248def soften_color(widget, state=gtk.STATE_NORMAL):
249    # this colour munging routine is heavily inspired by gdu_util_get_mix_color()
250 # from gnome-disk-utility:
251 # http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
252 blend = 0.7
253 style = widget.get_style()
254 color = style.text[state]
255 color.red = color.red * blend + style.base[state].red * (1.0 - blend)
256 color.green = color.green * blend + style.base[state].green * (1.0 - blend)
257 color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
258 return color.to_string()
259
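# Illustrative sketch (not part of the original source): soften_color() blends the
# style's text colour toward its base colour (70% text, 30% base per channel) and
# returns a colour string usable in Pango markup, for example:
#
#   label = gtk.Label()
#   faded = soften_color(label)   # colour string for the label's NORMAL state
#   label.set_markup("<span fgcolor='%s'>secondary text</span>" % faded)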
260class BaseHobButton(gtk.Button):
261 """
262 A gtk.Button subclass which follows the visual design of Hob for primary
263 action buttons
264
265 label: the text to display as the button's label
266 """
267 def __init__(self, label):
268 gtk.Button.__init__(self, label)
269 HobButton.style_button(self)
270
271 @staticmethod
272 def style_button(button):
273 style = button.get_style()
274 style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
275
276 button.set_flags(gtk.CAN_DEFAULT)
277 button.grab_default()
278
279# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
280 label = button.get_label()
281 button.set_label(label)
282 button.child.set_use_markup(True)
283
284class HobButton(BaseHobButton):
285 """
286 A gtk.Button subclass which follows the visual design of Hob for primary
287 action buttons
288
289 label: the text to display as the button's label
290 """
291 def __init__(self, label):
292 BaseHobButton.__init__(self, label)
293 HobButton.style_button(self)
294
295class HobAltButton(BaseHobButton):
296 """
297    A gtk.Button subclass which has no relief, and so is more discreet
298 """
299 def __init__(self, label):
300 BaseHobButton.__init__(self, label)
301 HobAltButton.style_button(self)
302
303 """
304 A callback for the state-changed event to ensure the text is displayed
305 differently when the widget is not sensitive
306 """
307 @staticmethod
308 def desensitise_on_state_change_cb(button, state):
309 if not button.get_property("sensitive"):
310 HobAltButton.set_text(button, False)
311 else:
312 HobAltButton.set_text(button, True)
313
314 """
315 Set the button label with an appropriate colour for the current widget state
316 """
317 @staticmethod
318 def set_text(button, sensitive=True):
319 if sensitive:
320 colour = HobColors.PALE_BLUE
321 else:
322 colour = HobColors.LIGHT_GRAY
323 button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
324 button.child.set_use_markup(True)
325
326class HobImageButton(gtk.Button):
327 """
328 A gtk.Button with an icon and two rows of text, the second of which is
329 displayed in a blended colour.
330
331 primary_text: the main button label
332 secondary_text: optional second line of text
333 icon_path: path to the icon file to display on the button
334 """
335 def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
336 gtk.Button.__init__(self)
337 self.set_relief(gtk.RELIEF_NONE)
338
339 self.icon_path = icon_path
340 self.hover_icon_path = hover_icon_path
341
342 hbox = gtk.HBox(False, 10)
343 hbox.show()
344 self.add(hbox)
345 self.icon = gtk.Image()
346 self.icon.set_from_file(self.icon_path)
347 self.icon.set_alignment(0.5, 0.0)
348 self.icon.show()
349 if self.hover_icon_path and len(self.hover_icon_path):
350 self.connect("enter-notify-event", self.set_hover_icon_cb)
351 self.connect("leave-notify-event", self.set_icon_cb)
352 hbox.pack_start(self.icon, False, False, 0)
353 label = gtk.Label()
354 label.set_alignment(0.0, 0.5)
355 colour = soften_color(label)
356 mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
357 label.set_markup(mark)
358 label.show()
359 hbox.pack_start(label, True, True, 0)
360
361 def set_hover_icon_cb(self, widget, event):
362 self.icon.set_from_file(self.hover_icon_path)
363
364 def set_icon_cb(self, widget, event):
365 self.icon.set_from_file(self.icon_path)
366
367class HobInfoButton(gtk.EventBox):
368 """
369 This class implements a button-like widget per the Hob visual and UX designs
370 which will display a persistent tooltip, with the contents of tip_markup, when
371 clicked.
372
373 tip_markup: the Pango Markup to be displayed in the persistent tooltip
374 """
375 def __init__(self, tip_markup, parent=None):
376 gtk.EventBox.__init__(self)
377 self.image = gtk.Image()
378 self.image.set_from_file(
379 hic.ICON_INFO_DISPLAY_FILE)
380 self.image.show()
381 self.add(self.image)
382 self.tip_markup = tip_markup
383 self.my_parent = parent
384
385 self.set_events(gtk.gdk.BUTTON_RELEASE |
386 gtk.gdk.ENTER_NOTIFY_MASK |
387 gtk.gdk.LEAVE_NOTIFY_MASK)
388
389 self.connect("button-release-event", self.button_release_cb)
390 self.connect("enter-notify-event", self.mouse_in_cb)
391 self.connect("leave-notify-event", self.mouse_out_cb)
392
393 """
394    When the mouse click is released, emulate a button-click and show the associated
395 PersistentTooltip
396 """
397 def button_release_cb(self, widget, event):
398 from bb.ui.crumbs.hig.propertydialog import PropertyDialog
399 self.dialog = PropertyDialog(title = '',
400 parent = self.my_parent,
401 information = self.tip_markup,
402 flags = gtk.DIALOG_DESTROY_WITH_PARENT
403 | gtk.DIALOG_NO_SEPARATOR)
404
405 button = self.dialog.add_button("Close", gtk.RESPONSE_CANCEL)
406 HobAltButton.style_button(button)
407 button.connect("clicked", lambda w: self.dialog.destroy())
408 self.dialog.show_all()
409 self.dialog.run()
410
411 """
412 Change to the prelight image when the mouse enters the widget
413 """
414 def mouse_in_cb(self, widget, event):
415 self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)
416
417 """
418    Change back to the stock image when the mouse leaves the widget
419 """
420 def mouse_out_cb(self, widget, event):
421 self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
422
423class HobIndicator(gtk.DrawingArea):
424 def __init__(self, count):
425 gtk.DrawingArea.__init__(self)
426 # Set no window for transparent background
427 self.set_has_window(False)
428 self.set_size_request(38,38)
429 # We need to pass through button clicks
430 self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
431
432 self.connect('expose-event', self.expose)
433
434 self.count = count
435 self.color = HobColors.GRAY
436
437 def expose(self, widget, event):
438 if self.count and self.count > 0:
439 ctx = widget.window.cairo_create()
440
441 x, y, w, h = self.allocation
442
443 ctx.set_operator(cairo.OPERATOR_OVER)
444 ctx.set_source_color(gtk.gdk.color_parse(self.color))
445 ctx.translate(w/2, h/2)
446 ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
447 ctx.fill_preserve()
448
449 layout = self.create_pango_layout(str(self.count))
450 textw, texth = layout.get_pixel_size()
451 x = (w/2)-(textw/2) + x
452 y = (h/2) - (texth/2) + y
453 ctx.move_to(x, y)
454 self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
455
456 def set_count(self, count):
457 self.count = count
458
459 def set_active(self, active):
460 if active:
461 self.color = HobColors.DEEP_RED
462 else:
463 self.color = HobColors.GRAY
464
465class HobTabLabel(gtk.HBox):
466 def __init__(self, text, count=0):
467 gtk.HBox.__init__(self, False, 0)
468 self.indicator = HobIndicator(count)
469 self.indicator.show()
470 self.pack_end(self.indicator, False, False)
471 self.lbl = gtk.Label(text)
472 self.lbl.set_alignment(0.0, 0.5)
473 self.lbl.show()
474 self.pack_end(self.lbl, True, True, 6)
475
476 def set_count(self, count):
477 self.indicator.set_count(count)
478
479 def set_active(self, active=True):
480 self.indicator.set_active(active)
481
482class HobNotebook(gtk.Notebook):
483 def __init__(self):
484 gtk.Notebook.__init__(self)
485 self.set_property('homogeneous', True)
486
487 self.pages = []
488
489 self.search = None
490 self.search_focus = False
491 self.page_changed = False
492
493 self.connect("switch-page", self.page_changed_cb)
494
495 self.show_all()
496
497 def page_changed_cb(self, nb, page, page_num):
498 for p, lbl in enumerate(self.pages):
499 if p == page_num:
500 lbl.set_active()
501 else:
502 lbl.set_active(False)
503
504 if self.search:
505 self.page_changed = True
506 self.reset_entry(self.search, page_num)
507
508 def append_page(self, child, tab_label, tab_tooltip=None):
509 label = HobTabLabel(tab_label)
510 if tab_tooltip:
511 label.set_tooltip_text(tab_tooltip)
512 label.set_active(False)
513 self.pages.append(label)
514 gtk.Notebook.append_page(self, child, label)
515
516 def set_entry(self, names, tips):
517 self.search = gtk.Entry()
518 self.search_names = names
519 self.search_tips = tips
520 style = self.search.get_style()
521 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
522 self.search.set_style(style)
523 self.search.set_text(names[0])
524 self.search.set_tooltip_text(self.search_tips[0])
525 self.search.props.has_tooltip = True
526
527 self.search.set_editable(False)
528 self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
529 self.search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
530 self.search.connect("icon-release", self.set_search_entry_clear_cb)
531 self.search.set_width_chars(30)
532 self.search.show()
533
534 self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
535 self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
536 self.set_action_widget(self.search, gtk.PACK_END)
537
538 def show_indicator_icon(self, title, number):
539 for child in self.pages:
540 if child.lbl.get_label() == title:
541 child.set_count(number)
542
543 def hide_indicator_icon(self, title):
544 for child in self.pages:
545 if child.lbl.get_label() == title:
546 child.set_count(0)
547
548 def set_search_entry_editable_cb(self, search, event):
549 self.search_focus = True
550 search.set_editable(True)
551 text = search.get_text()
552 if text in self.search_names:
553 search.set_text("")
554 style = self.search.get_style()
555 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
556 search.set_style(style)
557
558 def set_search_entry_reset_cb(self, search, event):
559 page_num = self.get_current_page()
560 text = search.get_text()
561 if not text:
562 self.reset_entry(search, page_num)
563
564 def reset_entry(self, entry, page_num):
565 style = entry.get_style()
566 style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
567 entry.set_style(style)
568 entry.set_text(self.search_names[page_num])
569 entry.set_tooltip_text(self.search_tips[page_num])
570 entry.set_editable(False)
571 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
572
573 def set_search_entry_clear_cb(self, search, icon_pos, event):
574 if search.get_editable() == True:
575 search.set_text("")
576 search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
577 search.grab_focus()
578
579 def set_page(self, title):
580 for child in self.pages:
581 if child.lbl.get_label() == title:
582 child.grab_focus()
583 self.set_current_page(self.pages.index(child))
584 return
585
586class HobWarpCellRendererText(gtk.CellRendererText):
587 def __init__(self, col_number):
588 gtk.CellRendererText.__init__(self)
589 self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
590        self.set_property("wrap-width", 300) # default wrap width is 300
591 self.col_n = col_number
592
593 def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
594 if widget:
595 self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
596 return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
597
598 def get_resized_wrap_width(self, treeview, column):
599 otherCols = []
600 for col in treeview.get_columns():
601 if col != column:
602 otherCols.append(col)
603 adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
604 adjwidth -= treeview.style_get_property("horizontal-separator") * 4
605 if self.props.wrap_width == adjwidth or adjwidth <= 0:
606 adjwidth = self.props.wrap_width
607 return adjwidth
608
609gobject.type_register(HobWarpCellRendererText)
610
611class HobIconChecker(hic):
612 def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
613 try:
614 pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
615 except Exception, e:
616 return None
617
618 if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
619 icon_factory = gtk.IconFactory()
620 icon_factory.add_default()
621 icon_factory.add(stock_id, gtk.IconSet(pixbuf))
622 gtk.stock_add([(stock_id, '_label', 0, 0, '')])
623
624 return icon_factory.lookup(stock_id)
625
626 return None
627
628 """
629    To keep hob icons consistent and avoid icons varying with the system or gtk version, some 'hob icons' are used in place of the 'gtk icons'.
630    This function checks the stock_id, replaces the gtk_id with the matching hob_id where one exists, and returns the resulting id (or "" if no name was given).
631 """
632 def check_stock_icon(self, stock_name=""):
633 HOB_CHECK_STOCK_NAME = {
634 ('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
635 ('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
636 ('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
637 ('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
638 ('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
639 }
640 valid_stock_id = stock_name
641 if stock_name:
642 for names, path in HOB_CHECK_STOCK_NAME.iteritems():
643 if stock_name in names:
644 valid_stock_id = names[0]
645 if not gtk.icon_factory_lookup_default(valid_stock_id):
646 self.set_hob_icon_to_stock_icon(path, valid_stock_id)
647
648 return valid_stock_id
649
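    # Illustrative sketch (not part of the original source): check_stock_icon() maps
    # any of the aliased names above to the preferred 'hic-*' id and, if that id is
    # not yet registered, installs the matching hob icon file as a stock icon:
    #
    #   checker = HobIconChecker()
    #   checker.check_stock_icon("gtk-dialog-info")   # -> "hic-dialog-info"
    #   checker.check_stock_icon("unknown-name")      # -> "unknown-name" (unchanged)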
650class HobCellRendererController(gobject.GObject):
651 (MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
652 __gsignals__ = {
653 "run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
654 gobject.TYPE_NONE,
655 ()),
656 }
657 def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
658 gobject.GObject.__init__(self)
659 self.timeout_id = None
660 self.current_angle_pos = 0.0
661 self.step_angle = 0.0
662 self.tree_headers_height = 0
663 self.running_cell_areas = []
664 self.running_mode = runningmode
665 self.is_queue_draw_row_area = is_draw_row
666 self.force_stop_enable = False
667
668 def is_active(self):
669 if self.timeout_id:
670 return True
671 else:
672 return False
673
674 def reset_run(self):
675 self.force_stop()
676 self.running_cell_areas = []
677 self.current_angle_pos = 0.0
678 self.step_angle = 0.0
679
680    ''' time_iterval: the basic timer interval, in milliseconds (1~1000)
681        init_usrdata: the current value at which the progress starts
682        min_usrdata: the minimum of the user data range
683        max_usrdata: the maximum of the user data range
684        step: the amount to advance on each timer tick
685        Note: init_usrdata should lie between min_usrdata and max_usrdata, max_usrdata should be > min_usrdata,
686              and step should be < (max_usrdata - min_usrdata)
687    '''
688 def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
689 if (not time_iterval) or (not max_usrdata):
690 return
691 usr_range = (max_usrdata - min_usrdata) * 1.0
692 self.current_angle_pos = (init_usrdata * 1.0) / usr_range
693 self.step_angle = (step * 1) / usr_range
694 self.timeout_id = gobject.timeout_add(int(time_iterval),
695 self.make_image_on_progressing_cb, tree)
696 self.tree_headers_height = self.get_treeview_headers_height(tree)
697 self.force_stop_enable = False
698
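    # Illustrative sketch (not part of the original source): per the docstring above,
    # a caller animating a progress value from 0 to 1000 in steps of 150, with a
    # redraw roughly every 200ms, would do something like:
    #
    #   ctrl = HobCellRendererController()
    #   ctrl.append_running_cell_area(cell_area)   # cell_area taken from the treeview
    #   ctrl.start_run(200, 0, 0, 1000, 150, tree) # tree: the gtk.TreeView being drawn
    #   ...
    #   ctrl.force_stop()                          # emits "run-timer-stopped"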
699 def force_stop(self):
700 self.emit("run-timer-stopped")
701 self.force_stop_enable = True
702 if self.timeout_id:
703 if gobject.source_remove(self.timeout_id):
704 self.timeout_id = None
705
706 def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
707 if pixbuf:
708 r = max(img_width/2, img_height/2)
709 cr.translate(x + r, y + r)
710 if do_refresh:
711 cr.rotate(2 * math.pi * self.current_angle_pos)
712
713 cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
714 cr.paint()
715
716 def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
717 if do_fadeout:
718 alpha = self.current_angle_pos * 0.8
719 else:
720 alpha = (1.0 - self.current_angle_pos) * 0.8
721
722 cr.set_source_rgba(color.red, color.green, color.blue, alpha)
723 cr.rectangle(x, y, width, height)
724 cr.fill()
725
726 def get_treeview_headers_height(self, tree):
727 if tree and (tree.get_property("headers-visible") == True):
728 height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
729 return height
730
731 return 0
732
733 def make_image_on_progressing_cb(self, tree):
734 self.current_angle_pos += self.step_angle
735 if self.running_mode == self.MODE_CYCLE_RUNNING:
736 if (self.current_angle_pos >= 1):
737 self.current_angle_pos = 0
738 else:
739 if self.current_angle_pos > 1:
740 self.force_stop()
741 return False
742
743 if self.is_queue_draw_row_area:
744 for path in self.running_cell_areas:
745 rect = tree.get_cell_area(path, tree.get_column(0))
746 row_x, _, row_width, _ = tree.get_visible_rect()
747 tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
748 else:
749 for rect in self.running_cell_areas:
750 tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)
751
752 return (not self.force_stop_enable)
753
754 def append_running_cell_area(self, cell_area):
755 if cell_area and (cell_area not in self.running_cell_areas):
756 self.running_cell_areas.append(cell_area)
757
758 def remove_running_cell_area(self, cell_area):
759 if cell_area in self.running_cell_areas:
760 self.running_cell_areas.remove(cell_area)
761 if not self.running_cell_areas:
762 self.reset_run()
763
764gobject.type_register(HobCellRendererController)
765
766class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
767 def __init__(self):
768 gtk.CellRendererPixbuf.__init__(self)
769 self.control = HobCellRendererController()
770        # add an icon checker that maps gtk icons to the corresponding hob icons
771 self.checker = HobIconChecker()
772 self.set_property("stock-size", gtk.ICON_SIZE_DND)
773
774 def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
775 if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
776 return widget.render_icon(stock_id, size)
777
778 return None
779
780 def set_icon_name_to_id(self, new_name):
781 if new_name and type(new_name) == str:
782            # check whether the name needs to be mapped to a hob icon
783 name = self.checker.check_stock_icon(new_name)
784 if name.startswith("hic") or name.startswith("gtk"):
785 stock_id = name
786 else:
787 stock_id = 'gtk-' + name
788
789 return stock_id
790
791    ''' Render the cell; the "icon-name" property takes priority.
792        Using 'hic-task-refresh' animates the pixbuf,
793        while 'pix' draws the pixbuf taken from the pixbuf or image property as-is.
794    '''
795 def do_render(self, window, tree, background_area,cell_area, expose_area, flags):
796 if (not self.control) or (not tree):
797 return
798
799 x, y, w, h = self.on_get_size(tree, cell_area)
800 x += cell_area.x
801 y += cell_area.y
802 w -= 2 * self.get_property("xpad")
803 h -= 2 * self.get_property("ypad")
804
805 stock_id = ""
806 if self.props.icon_name:
807 stock_id = self.set_icon_name_to_id(self.props.icon_name)
808 elif self.props.stock_id:
809 stock_id = self.props.stock_id
810 elif self.props.pixbuf:
811 pix = self.props.pixbuf
812 else:
813 return
814
815 if stock_id:
816 pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
817 if stock_id == 'hic-task-refresh':
818 self.control.append_running_cell_area(cell_area)
819 if self.control.is_active():
820 self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
821 else:
822 self.control.start_run(200, 0, 0, 1000, 150, tree)
823 else:
824 self.control.remove_running_cell_area(cell_area)
825 self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
826
827 def on_get_size(self, widget, cell_area):
828 if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
829 w, h = gtk.icon_size_lookup(self.props.stock_size)
830 calc_width = self.get_property("xpad") * 2 + w
831 calc_height = self.get_property("ypad") * 2 + h
832 x_offset = 0
833 y_offset = 0
834 if cell_area and w > 0 and h > 0:
835 x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
836 y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))
837
838 return x_offset, y_offset, w, h
839
840 return 0, 0, 0, 0
841
842gobject.type_register(HobCellRendererPixbuf)
843
844class HobCellRendererToggle(gtk.CellRendererToggle):
845 def __init__(self):
846 gtk.CellRendererToggle.__init__(self)
847 self.ctrl = HobCellRendererController(is_draw_row=True)
848 self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
849 self.cell_attr = {"fadeout": False, "number_of_children": 0}
850
851 def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
852 if (not self.ctrl) or (not widget):
853 return
854
855 if flags & gtk.CELL_RENDERER_SELECTED:
856 state = gtk.STATE_SELECTED
857 else:
858 state = gtk.STATE_NORMAL
859
860 if self.ctrl.is_active():
861 path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
862            # the cell_area coordinates can sometimes be negative, e.g. while dragging the scroll bar,
863            # because the area is outside the visible tree range; in that case the path lookup fails
864 if not path: return
865 path = path[0]
866 if path in self.ctrl.running_cell_areas:
867 cr = window.cairo_create()
868 color = widget.get_style().base[state]
869
870 row_x, _, row_width, _ = widget.get_visible_rect()
871 border_y = self.get_property("ypad")
872 self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
873 cell_area.height + border_y * 2, self.cell_attr["fadeout"])
874                # draw the package count for a group
875 if self.cell_attr["number_of_children"]:
876 text = "%d pkg" % self.cell_attr["number_of_children"]
877 pangolayout = widget.create_pango_layout(text)
878 textw, texth = pangolayout.get_pixel_size()
879 x = cell_area.x + (cell_area.width/2) - (textw/2)
880 y = cell_area.y + (cell_area.height/2) - (texth/2)
881
882 widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
883 else:
884 return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
885
886    '''delay: the fade duration; normally 1000ms
887       cell_list: the cells that need to be redrawn
888    '''
889 def fadeout(self, tree, delay, cell_list=None):
890 if (delay < 200) or (not tree):
891 return
892 self.cell_attr["fadeout"] = True
893 self.ctrl.running_cell_areas = cell_list
894 self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)
895
896 def connect_render_state_changed(self, func, usrdata=None):
897 if not func:
898 return
899 if usrdata:
900 self.ctrl.connect("run-timer-stopped", func, self, usrdata)
901 else:
902 self.ctrl.connect("run-timer-stopped", func, self)
903
904gobject.type_register(HobCellRendererToggle)
diff --git a/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py b/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py
new file mode 100644
index 0000000000..2766bea8c7
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py
@@ -0,0 +1,561 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25import re
26from bb.ui.crumbs.progressbar import HobProgressBar
27from bb.ui.crumbs.hobcolor import HobColors
28from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
29from bb.ui.crumbs.hoblistmodel import RecipeListModel
30from bb.ui.crumbs.hobpages import HobPage
31from bb.ui.crumbs.hig.retrieveimagedialog import RetrieveImageDialog
32
33#
34# ImageConfigurationPage
35#
36class ImageConfigurationPage (HobPage):
37
38 __dummy_machine__ = "--select a machine--"
39 __dummy_image__ = "--select an image recipe--"
40 __custom_image__ = "Select from my image recipes"
41
42 def __init__(self, builder):
43 super(ImageConfigurationPage, self).__init__(builder, "Image configuration")
44
45 self.image_combo_id = None
46        # machine_combo_changed_by_manual records whether the machine was changed by code
47        # or manually by the user. If it was changed manually, all of the user's recipe
48        # and package selections are cleared.
49 self.machine_combo_changed_by_manual = True
50 self.stopping = False
51 self.warning_shift = 0
52 self.custom_image_selected = None
53 self.create_visual_elements()
54
55 def create_visual_elements(self):
56 # create visual elements
57 self.toolbar = gtk.Toolbar()
58 self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
59 self.toolbar.set_style(gtk.TOOLBAR_BOTH)
60
61 my_images_button = self.append_toolbar_button(self.toolbar,
62 "Images",
63 hic.ICON_IMAGES_DISPLAY_FILE,
64 hic.ICON_IMAGES_HOVER_FILE,
65 "Open previously built images",
66 self.my_images_button_clicked_cb)
67 settings_button = self.append_toolbar_button(self.toolbar,
68 "Settings",
69 hic.ICON_SETTINGS_DISPLAY_FILE,
70 hic.ICON_SETTINGS_HOVER_FILE,
71 "View additional build settings",
72 self.settings_button_clicked_cb)
73
74 self.config_top_button = self.add_onto_top_bar(self.toolbar)
75
76 self.gtable = gtk.Table(40, 40, True)
77 self.create_config_machine()
78 self.create_config_baseimg()
79 self.config_build_button = self.create_config_build_button()
80
81 def _remove_all_widget(self):
82 children = self.gtable.get_children() or []
83 for child in children:
84 self.gtable.remove(child)
85 children = self.box_group_area.get_children() or []
86 for child in children:
87 self.box_group_area.remove(child)
88 children = self.get_children() or []
89 for child in children:
90 self.remove(child)
91
92 def _pack_components(self, pack_config_build_button = False):
93 self._remove_all_widget()
94 self.pack_start(self.config_top_button, expand=False, fill=False)
95 self.pack_start(self.group_align, expand=True, fill=True)
96
97 self.box_group_area.pack_start(self.gtable, expand=True, fill=True)
98 if pack_config_build_button:
99 self.box_group_area.pack_end(self.config_build_button, expand=False, fill=False)
100 else:
101 box = gtk.HBox(False, 6)
102 box.show()
103 subbox = gtk.HBox(False, 0)
104 subbox.set_size_request(205, 49)
105 subbox.show()
106 box.add(subbox)
107 self.box_group_area.pack_end(box, False, False)
108
109 def show_machine(self):
110 self.progress_bar.reset()
111 self._pack_components(pack_config_build_button = False)
112 self.set_config_machine_layout(show_progress_bar = False)
113 self.show_all()
114
115 def update_progress_bar(self, title, fraction, status=None):
116 if self.stopping == False:
117 self.progress_bar.update(fraction)
118 self.progress_bar.set_text(title)
119 self.progress_bar.set_rcstyle(status)
120
121 def show_info_populating(self):
122 self._pack_components(pack_config_build_button = False)
123 self.set_config_machine_layout(show_progress_bar = True)
124 self.show_all()
125
126 def show_info_populated(self):
127 self.progress_bar.reset()
128 self._pack_components(pack_config_build_button = False)
129 self.set_config_machine_layout(show_progress_bar = False)
130 self.set_config_baseimg_layout()
131 self.show_all()
132
133 def show_baseimg_selected(self):
134 self.progress_bar.reset()
135 self._pack_components(pack_config_build_button = True)
136 self.set_config_machine_layout(show_progress_bar = False)
137 self.set_config_baseimg_layout()
138 self.show_all()
139 if self.builder.recipe_model.get_selected_image() == self.builder.recipe_model.__custom_image__:
140 self.just_bake_button.hide()
141
142 def add_warnings_bar(self):
143        #create the warnings bar shown when recipe parsing generates warnings
144 color = HobColors.KHAKI
145 warnings_bar = gtk.EventBox()
146 warnings_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
147 warnings_bar.set_flags(gtk.CAN_DEFAULT)
148 warnings_bar.grab_default()
149
150 build_stop_tab = gtk.Table(10, 20, True)
151 warnings_bar.add(build_stop_tab)
152
153 icon = gtk.Image()
154 icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ALERT_FILE)
155 icon.set_from_pixbuf(icon_pix_buffer)
156 build_stop_tab.attach(icon, 0, 2, 0, 10)
157
158 label = gtk.Label()
159 label.set_alignment(0.0, 0.5)
160 warnings_nb = len(self.builder.parsing_warnings)
161 if warnings_nb == 1:
162 label.set_markup("<span size='x-large'><b>1 recipe parsing warning</b></span>")
163 else:
164 label.set_markup("<span size='x-large'><b>%s recipe parsing warnings</b></span>" % warnings_nb)
165 build_stop_tab.attach(label, 2, 12, 0, 10)
166
167 view_warnings_button = HobButton("View warnings")
168 view_warnings_button.connect('clicked', self.view_warnings_button_clicked_cb)
169 build_stop_tab.attach(view_warnings_button, 15, 19, 1, 9)
170
171 return warnings_bar
172
173 def disable_warnings_bar(self):
174 if self.builder.parsing_warnings:
175 if hasattr(self, 'warnings_bar'):
176 self.warnings_bar.hide_all()
177 self.builder.parsing_warnings = []
178
179 def create_config_machine(self):
180 self.machine_title = gtk.Label()
181 self.machine_title.set_alignment(0.0, 0.5)
182 mark = "<span %s>Select a machine</span>" % self.span_tag('x-large', 'bold')
183 self.machine_title.set_markup(mark)
184
185 self.machine_title_desc = gtk.Label()
186 self.machine_title_desc.set_alignment(0.0, 0.5)
187 mark = ("<span %s>Your selection is the profile of the target machine for which you"
188 " are building the image.\n</span>") % (self.span_tag('medium'))
189 self.machine_title_desc.set_markup(mark)
190
191 self.machine_combo = gtk.combo_box_new_text()
192 self.machine_combo.connect("changed", self.machine_combo_changed_cb)
193
194 icon_file = hic.ICON_LAYERS_DISPLAY_FILE
195 hover_file = hic.ICON_LAYERS_HOVER_FILE
196 self.layer_button = HobImageButton("Layers", "Add support for machines, software, etc.",
197 icon_file, hover_file)
198 self.layer_button.connect("clicked", self.layer_button_clicked_cb)
199
200 markup = "Layers are a powerful mechanism to extend the Yocto Project "
201 markup += "with your own functionality.\n"
202 markup += "For more on layers, check the <a href=\""
203 markup += "http://www.yoctoproject.org/docs/current/dev-manual/"
204 markup += "dev-manual.html#understanding-and-using-layers\">reference manual</a>."
205 self.layer_info_icon = HobInfoButton("<b>Layers</b>" + "*" + markup, self.get_parent())
206 self.progress_bar = HobProgressBar()
207 self.stop_button = HobAltButton("Stop")
208 self.stop_button.connect("clicked", self.stop_button_clicked_cb)
209 self.machine_separator = gtk.HSeparator()
210
211 def set_config_machine_layout(self, show_progress_bar = False):
212 self.gtable.attach(self.machine_title, 0, 40, 0, 4)
213 self.gtable.attach(self.machine_title_desc, 0, 40, 4, 6)
214 self.gtable.attach(self.machine_combo, 0, 12, 7, 10)
215 self.gtable.attach(self.layer_button, 14, 36, 7, 12)
216 self.gtable.attach(self.layer_info_icon, 36, 40, 7, 11)
217 if show_progress_bar:
218 #self.gtable.attach(self.progress_box, 0, 40, 15, 18)
219 self.gtable.attach(self.progress_bar, 0, 37, 15, 18)
220 self.gtable.attach(self.stop_button, 37, 40, 15, 18, 0, 0)
221 if self.builder.parsing_warnings:
222 self.warnings_bar = self.add_warnings_bar()
223 self.gtable.attach(self.warnings_bar, 0, 40, 14, 18)
224 self.warning_shift = 4
225 else:
226 self.warning_shift = 0
227 self.gtable.attach(self.machine_separator, 0, 40, 13, 14)
228
229 def create_config_baseimg(self):
230 self.image_title = gtk.Label()
231 self.image_title.set_alignment(0, 1.0)
232 mark = "<span %s>Select an image recipe</span>" % self.span_tag('x-large', 'bold')
233 self.image_title.set_markup(mark)
234
235 self.image_title_desc = gtk.Label()
236 self.image_title_desc.set_alignment(0, 0.5)
237
238 mark = ("<span %s>Image recipes are a starting point for the type of image you want. "
239 "You can build them as \n"
240 "they are or edit them to suit your needs.\n</span>") % self.span_tag('medium')
241 self.image_title_desc.set_markup(mark)
242
243 self.image_combo = gtk.combo_box_new_text()
244 self.image_combo.set_row_separator_func(self.combo_separator_func, None)
245 self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
246
247 self.image_desc = gtk.Label()
248 self.image_desc.set_alignment(0.0, 0.5)
249 self.image_desc.set_size_request(256, -1)
250 self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
251 self.image_desc.set_line_wrap(True)
252
253 # button to view recipes
254 icon_file = hic.ICON_RCIPE_DISPLAY_FILE
255 hover_file = hic.ICON_RCIPE_HOVER_FILE
256 self.view_adv_configuration_button = HobImageButton("Advanced configuration",
257 "Select image types, package formats, etc",
258 icon_file, hover_file)
259 self.view_adv_configuration_button.connect("clicked", self.view_adv_configuration_button_clicked_cb)
260
261 self.image_separator = gtk.HSeparator()
262
263 def combo_separator_func(self, model, iter, user_data):
264 name = model.get_value(iter, 0)
265 if name == "--Separator--":
266 return True
267
268 def set_config_baseimg_layout(self):
269 self.gtable.attach(self.image_title, 0, 40, 15+self.warning_shift, 17+self.warning_shift)
270 self.gtable.attach(self.image_title_desc, 0, 40, 18+self.warning_shift, 22+self.warning_shift)
271 self.gtable.attach(self.image_combo, 0, 12, 23+self.warning_shift, 26+self.warning_shift)
272 self.gtable.attach(self.image_desc, 0, 12, 27+self.warning_shift, 33+self.warning_shift)
273 self.gtable.attach(self.view_adv_configuration_button, 14, 36, 23+self.warning_shift, 28+self.warning_shift)
274 self.gtable.attach(self.image_separator, 0, 40, 35+self.warning_shift, 36+self.warning_shift)
275
276 def create_config_build_button(self):
277 # Create the "Build packages" and "Build image" buttons at the bottom
278 button_box = gtk.HBox(False, 6)
279
280 # create button "Build image"
281 self.just_bake_button = HobButton("Build image")
282 self.just_bake_button.set_tooltip_text("Build the image recipe as it is")
283 self.just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
284 button_box.pack_end(self.just_bake_button, expand=False, fill=False)
285
286 # create button "Edit image recipe"
287 self.edit_image_button = HobAltButton("Edit image recipe")
288 self.edit_image_button.set_tooltip_text("Customize the recipes and packages to be included in your image")
289 self.edit_image_button.connect("clicked", self.edit_image_button_clicked_cb)
290 button_box.pack_end(self.edit_image_button, expand=False, fill=False)
291
292 return button_box
293
294 def stop_button_clicked_cb(self, button):
295 self.stopping = True
296 self.progress_bar.set_text("Stopping recipe parsing")
297 self.progress_bar.set_rcstyle("stop")
298 self.builder.cancel_parse_sync()
299
300 def view_warnings_button_clicked_cb(self, button):
301 self.builder.show_warning_dialog()
302
303 def machine_combo_changed_idle_cb(self):
304 self.builder.window.set_cursor(None)
305
306 def machine_combo_changed_cb(self, machine_combo):
307 self.stopping = False
308 self.builder.parsing_warnings = []
309 combo_item = machine_combo.get_active_text()
310 if not combo_item or combo_item == self.__dummy_machine__:
311 return
312
313 self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
314 self.builder.wait(0.1) #wait for combo and cursor to update
315
316 # remove __dummy_machine__ item from the store list after first user selection
317 # because it is no longer valid
318 combo_store = machine_combo.get_model()
319 if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
320 machine_combo.remove_text(0)
321
322 self.builder.configuration.curr_mach = combo_item
323 if self.machine_combo_changed_by_manual:
324 self.builder.configuration.clear_selection()
325 # reset machine_combo_changed_by_manual
326 self.machine_combo_changed_by_manual = True
327
328 self.builder.configuration.selected_image = None
329
330 # Do reparse recipes
331 self.builder.populate_recipe_package_info_async()
332
333 glib.idle_add(self.machine_combo_changed_idle_cb)
334
335 def update_machine_combo(self):
336 self.disable_warnings_bar()
337 all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines
338
339 model = self.machine_combo.get_model()
340 model.clear()
341 for machine in all_machines:
342 self.machine_combo.append_text(machine)
343 self.machine_combo.set_active(0)
344
345 def switch_machine_combo(self):
346 self.disable_warnings_bar()
347 self.machine_combo_changed_by_manual = False
348 model = self.machine_combo.get_model()
349 active = 0
350 while active < len(model):
351 if model[active][0] == self.builder.configuration.curr_mach:
352 self.machine_combo.set_active(active)
353 return
354 active += 1
355
356 if model[0][0] != self.__dummy_machine__:
357 self.machine_combo.insert_text(0, self.__dummy_machine__)
358
359 self.machine_combo.set_active(0)
360
361 def update_image_desc(self):
362 desc = ""
363 selected_image = self.image_combo.get_active_text()
364 if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
365 image_path = self.builder.recipe_model.pn_path[selected_image]
366 image_iter = self.builder.recipe_model.get_iter(image_path)
367 desc = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)
368
369 mark = ("<span %s>%s</span>\n") % (self.span_tag('small'), desc)
370 self.image_desc.set_markup(mark)
371
372 def image_combo_changed_idle_cb(self, selected_image, selected_recipes, selected_packages):
373 self.builder.update_recipe_model(selected_image, selected_recipes)
374 self.builder.update_package_model(selected_packages)
375 self.builder.window_sensitive(True)
376
377 def image_combo_changed_cb(self, combo):
378 self.builder.window_sensitive(False)
379 selected_image = self.image_combo.get_active_text()
380 if selected_image == self.__custom_image__:
381 topdir = self.builder.get_topdir()
382 images_dir = topdir + "/recipes/images/custom/"
383 self.builder.ensure_dir(images_dir)
384
385 dialog = RetrieveImageDialog(images_dir, "Select from my image recipes",
386 self.builder, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
387 response = dialog.run()
388 if response == gtk.RESPONSE_OK:
389 image_name = dialog.get_filename()
390 head, tail = os.path.split(image_name)
391 selected_image = os.path.splitext(tail)[0]
392 self.custom_image_selected = selected_image
393 self.update_image_combo(self.builder.recipe_model, selected_image)
394 else:
395 selected_image = self.__dummy_image__
396 self.update_image_combo(self.builder.recipe_model, None)
397 dialog.destroy()
398 else:
399 if self.custom_image_selected:
400 self.custom_image_selected = None
401 self.update_image_combo(self.builder.recipe_model, selected_image)
402
403 if not selected_image or (selected_image == self.__dummy_image__):
404 self.builder.window_sensitive(True)
405 self.just_bake_button.hide()
406 self.edit_image_button.hide()
407 return
408
409 # remove __dummy_image__ item from the store list after first user selection
410 # because it is no longer valid
411 combo_store = combo.get_model()
412 if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
413 combo.remove_text(0)
414
415 self.builder.customized = False
416
417 selected_recipes = []
418
419 image_path = self.builder.recipe_model.pn_path[selected_image]
420 image_iter = self.builder.recipe_model.get_iter(image_path)
421 selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
422 self.update_image_desc()
423
424 self.builder.recipe_model.reset()
425 self.builder.package_model.reset()
426
427 self.show_baseimg_selected()
428
429 if selected_image == self.builder.recipe_model.__custom_image__:
430 self.just_bake_button.hide()
431
432 glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)
433
434 def _image_combo_connect_signal(self):
435 if not self.image_combo_id:
436 self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
437
438 def _image_combo_disconnect_signal(self):
439 if self.image_combo_id:
440 self.image_combo.disconnect(self.image_combo_id)
441 self.image_combo_id = None
442
443 def update_image_combo(self, recipe_model, selected_image):
444 # Update the image combo according to the images in the recipe_model
445 # populate image combo
446 filter = {RecipeListModel.COL_TYPE : ['image']}
447 image_model = recipe_model.tree_model(filter)
448 image_model.set_sort_column_id(recipe_model.COL_NAME, gtk.SORT_ASCENDING)
449 active = 0
450 cnt = 0
451
452 white_pattern = []
453 if self.builder.parameters.image_white_pattern:
454 for i in self.builder.parameters.image_white_pattern.split():
455 white_pattern.append(re.compile(i))
456
457 black_pattern = []
458 if self.builder.parameters.image_black_pattern:
459 for i in self.builder.parameters.image_black_pattern.split():
460 black_pattern.append(re.compile(i))
461 black_pattern.append(re.compile("hob-image"))
462 black_pattern.append(re.compile("edited(-[0-9]*)*.bb$"))
463
464 it = image_model.get_iter_first()
465 self._image_combo_disconnect_signal()
466 model = self.image_combo.get_model()
467 model.clear()
468        # Set an indicator text in the combo store when it is first opened
469 if not selected_image:
470 self.image_combo.append_text(self.__dummy_image__)
471 cnt = cnt + 1
472
473 self.image_combo.append_text(self.__custom_image__)
474 self.image_combo.append_text("--Separator--")
475 cnt = cnt + 2
476
477 topdir = self.builder.get_topdir()
478 # append and set active
479 while it:
480 path = image_model.get_path(it)
481 it = image_model.iter_next(it)
482 image_name = image_model[path][recipe_model.COL_NAME]
483 if image_name == self.builder.recipe_model.__custom_image__:
484 continue
485
486 if black_pattern:
487 allow = True
488 for pattern in black_pattern:
489 if pattern.search(image_name):
490 allow = False
491 break
492 elif white_pattern:
493 allow = False
494 for pattern in white_pattern:
495 if pattern.search(image_name):
496 allow = True
497 break
498 else:
499 allow = True
500
501 file_name = image_model[path][recipe_model.COL_FILE]
502 if file_name and topdir in file_name:
503 allow = False
504
505 if allow:
506 self.image_combo.append_text(image_name)
507 if image_name == selected_image:
508 active = cnt
509 cnt = cnt + 1
510 self.image_combo.append_text(self.builder.recipe_model.__custom_image__)
511
512 if selected_image == self.builder.recipe_model.__custom_image__:
513 active = cnt
514
515 if self.custom_image_selected:
516 self.image_combo.append_text("--Separator--")
517 self.image_combo.append_text(self.custom_image_selected)
518 cnt = cnt + 2
519 if self.custom_image_selected == selected_image:
520 active = cnt
521
522 self.image_combo.set_active(active)
523
524 if active != 0:
525 self.show_baseimg_selected()
526
527 self._image_combo_connect_signal()
528
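    # Illustrative note (not part of the original source): image_white_pattern and
    # image_black_pattern are whitespace-separated regular expressions read from the
    # builder parameters. In the loop above, an image name matching any black pattern
    # is hidden; with white patterns, a name is shown only if it matches one of them.
    # Hypothetical settings might look like:
    #
    #   image_black_pattern = "meta-toolchain .*-sdk"
    #   image_white_pattern = "core-image-.*"
    #
    # 'hob-image' and edited image recipes are always filtered out, and images whose
    # recipe file lives under the current build directory are skipped as well.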
529 def layer_button_clicked_cb(self, button):
530 # Create a layer selection dialog
531 self.builder.show_layer_selection_dialog()
532
533 def view_adv_configuration_button_clicked_cb(self, button):
534 # Create an advanced settings dialog
535 response, settings_changed = self.builder.show_adv_settings_dialog()
536 if not response:
537 return
538 if settings_changed:
539 self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
540 self.builder.wait(0.1) #wait for adv_settings_dialog to terminate
541 self.builder.reparse_post_adv_settings()
542 self.builder.window.set_cursor(None)
543
544 def just_bake_button_clicked_cb(self, button):
545 self.builder.parsing_warnings = []
546 self.builder.just_bake()
547
548 def edit_image_button_clicked_cb(self, button):
549 self.builder.set_base_image()
550 self.builder.show_recipes()
551
552 def my_images_button_clicked_cb(self, button):
553 self.builder.show_load_my_images_dialog()
554
555 def settings_button_clicked_cb(self, button):
556 # Create an advanced settings dialog
557 response, settings_changed = self.builder.show_simple_settings_dialog()
558 if not response:
559 return
560 if settings_changed:
561 self.builder.reparse_post_adv_settings()
diff --git a/bitbake/lib/bb/ui/crumbs/imagedetailspage.py b/bitbake/lib/bb/ui/crumbs/imagedetailspage.py
new file mode 100755
index 0000000000..352e9489fd
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/imagedetailspage.py
@@ -0,0 +1,669 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gobject
24import gtk
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import hic, HobViewTable, HobAltButton, HobButton
27from bb.ui.crumbs.hobpages import HobPage
28import subprocess
29from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
30from bb.ui.crumbs.hig.saveimagedialog import SaveImageDialog
31
32#
33# ImageDetailsPage
34#
35class ImageDetailsPage (HobPage):
36
37 class DetailBox (gtk.EventBox):
38 def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, button2=None, color = HobColors.LIGHT_GRAY):
39 gtk.EventBox.__init__(self)
40
41 # set color
42 style = self.get_style().copy()
43 style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
44 self.set_style(style)
45
46 self.row = gtk.Table(1, 2, False)
47 self.row.set_border_width(10)
48 self.add(self.row)
49
50 total_rows = 0
51 if widget:
52 total_rows = 10
53 if varlist and vallist:
54 # pack the icon and the text on the left
55 total_rows += len(varlist)
56 self.table = gtk.Table(total_rows, 20, True)
57 self.table.set_row_spacings(6)
58 self.table.set_size_request(100, -1)
59 self.row.attach(self.table, 0, 1, 0, 1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL)
60
61 colid = 0
62 rowid = 0
63 self.line_widgets = {}
64 if icon:
65 self.table.attach(icon, colid, colid + 2, 0, 1)
66 colid = colid + 2
67 if widget:
68 self.table.attach(widget, colid, 20, 0, 10)
69 rowid = 10
70 if varlist and vallist:
71 for row in range(rowid, total_rows):
72 index = row - rowid
73 self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
74 self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
75 # pack the button on the right
76 if button:
77 self.bbox = gtk.VBox()
78 self.bbox.pack_start(button, expand=True, fill=False)
79 if button2:
80 self.bbox.pack_start(button2, expand=True, fill=False)
81 self.bbox.set_size_request(150,-1)
82 self.row.attach(self.bbox, 1, 2, 0, 1, xoptions=gtk.FILL, yoptions=gtk.EXPAND)
83
84 def update_line_widgets(self, variable, value):
85 if len(self.line_widgets) == 0:
86 return
87 if not isinstance(self.line_widgets[variable], gtk.Label):
88 return
89 self.line_widgets[variable].set_markup(self.format_line(variable, value))
90
91 def wrap_line(self, inputs):
92 # wrap the long text of inputs
93 wrap_width_chars = 75
94 outputs = ""
95 tmps = inputs
96 less_chars = len(inputs)
97 while (less_chars - wrap_width_chars) > 0:
98 less_chars -= wrap_width_chars
99 outputs += tmps[:wrap_width_chars] + "\n "
100 tmps = inputs[less_chars:]
101 outputs += tmps
102 return outputs
103
104 def format_line(self, variable, value):
105 wraped_value = self.wrap_line(value)
106 markup = "<span weight=\'bold\'>%s</span>" % variable
107 markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
108 return markup
109
110 def text2label(self, variable, value):
111 # append the name:value to the left box
112 # such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
113 label = gtk.Label()
114 label.set_alignment(0.0, 0.5)
115 label.set_markup(self.format_line(variable, value))
116 return label
117
118 class BuildDetailBox (gtk.EventBox):
119 def __init__(self, varlist = None, vallist = None, icon = None, color = HobColors.LIGHT_GRAY):
120 gtk.EventBox.__init__(self)
121
122 # set color
123 style = self.get_style().copy()
124 style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
125 self.set_style(style)
126
127 self.hbox = gtk.HBox()
128 self.hbox.set_border_width(10)
129 self.add(self.hbox)
130
131 total_rows = 0
132 if varlist and vallist:
133 # pack the icon and the text on the left
134 total_rows += len(varlist)
135 self.table = gtk.Table(total_rows, 20, True)
136 self.table.set_row_spacings(6)
137 self.table.set_size_request(100, -1)
138 self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)
139
140 colid = 0
141 rowid = 0
142 self.line_widgets = {}
143 if icon:
144 self.table.attach(icon, colid, colid + 2, 0, 1)
145 colid = colid + 2
146 if varlist and vallist:
147 for row in range(rowid, total_rows):
148 index = row - rowid
149 self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
150 self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
151
152 def update_line_widgets(self, variable, value):
153 if len(self.line_widgets) == 0:
154 return
155 if not isinstance(self.line_widgets[variable], gtk.Label):
156 return
157 self.line_widgets[variable].set_markup(self.format_line(variable, value))
158
159 def wrap_line(self, inputs):
160 # wrap the long input text into chunks of wrap_width_chars characters
161 wrap_width_chars = 75
162 outputs = ""
163 tmps = inputs
164 # consume the remaining text one chunk at a time so that no
165 # characters are skipped or duplicated
166 while len(tmps) > wrap_width_chars:
167 outputs += tmps[:wrap_width_chars] + "\n "
168 tmps = tmps[wrap_width_chars:]
169 outputs += tmps
170 return outputs
171
172 def format_line(self, variable, value):
173 wrapped_value = self.wrap_line(value)
174 markup = "<span weight='bold'>%s</span>" % variable
175 markup += "<span weight='normal' foreground='#1c1c1c' font_desc='14px'>%s</span>" % wrapped_value
176 return markup
177
178 def text2label(self, variable, value):
179 # append the name:value to the left box
180 # such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
181 label = gtk.Label()
182 label.set_alignment(0.0, 0.5)
183 label.set_markup(self.format_line(variable, value))
184 return label
185
186 def __init__(self, builder):
187 super(ImageDetailsPage, self).__init__(builder, "Image details")
188
189 self.image_store = []
190 self.button_ids = {}
191 self.details_bottom_buttons = gtk.HBox(False, 6)
192 self.image_saved = False
193 self.create_visual_elements()
194 self.name_field_template = ""
195 self.description_field_template = ""
196
197 def create_visual_elements(self):
198 # create visual elements
199 # create the toolbar
200 self.toolbar = gtk.Toolbar()
201 self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
202 self.toolbar.set_style(gtk.TOOLBAR_BOTH)
203
204 my_images_button = self.append_toolbar_button(self.toolbar,
205 "Images",
206 hic.ICON_IMAGES_DISPLAY_FILE,
207 hic.ICON_IMAGES_HOVER_FILE,
208 "Open previously built images",
209 self.my_images_button_clicked_cb)
210 settings_button = self.append_toolbar_button(self.toolbar,
211 "Settings",
212 hic.ICON_SETTINGS_DISPLAY_FILE,
213 hic.ICON_SETTINGS_HOVER_FILE,
214 "View additional build settings",
215 self.settings_button_clicked_cb)
216
217 self.details_top_buttons = self.add_onto_top_bar(self.toolbar)
218
219 def _remove_all_widget(self):
220 children = self.get_children() or []
221 for child in children:
222 self.remove(child)
223 children = self.box_group_area.get_children() or []
224 for child in children:
225 self.box_group_area.remove(child)
226 children = self.details_bottom_buttons.get_children() or []
227 for child in children:
228 self.details_bottom_buttons.remove(child)
229
230 def show_page(self, step):
231 self.build_succeeded = (step == self.builder.IMAGE_GENERATED)
232 image_addr = self.builder.parameters.image_addr
233 image_names = self.builder.parameters.image_names
234 if self.build_succeeded:
235 machine = self.builder.configuration.curr_mach
236 base_image = self.builder.recipe_model.get_selected_image()
237 layers = self.builder.configuration.layers
238 pkg_num = "%s" % len(self.builder.package_model.get_selected_packages())
239 log_file = self.builder.current_logfile
240 else:
241 pkg_num = "N/A"
242 log_file = None
243
244 # remove
245 for button_id, button in self.button_ids.items():
246 button.disconnect(button_id)
247 self._remove_all_widget()
248
249 # repack
250 self.pack_start(self.details_top_buttons, expand=False, fill=False)
251 self.pack_start(self.group_align, expand=True, fill=True)
252
253 self.build_result = None
254 if self.image_saved or (self.build_succeeded and self.builder.current_step == self.builder.IMAGE_GENERATING):
255 # building is the previous step
256 icon = gtk.Image()
257 pixmap_path = hic.ICON_INDI_CONFIRM_FILE
258 color = HobColors.RUNNING
259 pix_buffer = gtk.gdk.pixbuf_new_from_file(pixmap_path)
260 icon.set_from_pixbuf(pix_buffer)
261 varlist = [""]
262 if self.image_saved:
263 vallist = ["Your image recipe has been saved"]
264 else:
265 vallist = ["Your image is ready"]
266 self.build_result = self.BuildDetailBox(varlist=varlist, vallist=vallist, icon=icon, color=color)
267 self.box_group_area.pack_start(self.build_result, expand=False, fill=False)
268
269 self.buttonlist = ["Build new image", "Save image recipe", "Run image", "Deploy image"]
270
271 # Name
272 self.image_store = []
273 self.toggled_image = ""
274 default_image_size = 0
275 self.num_toggled = 0
276 i = 0
277 for image_name in image_names:
278 image_size = HobPage._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)
279
280 image_attr = ("run" if (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) else \
281 ("deploy" if self.test_deployable(image_name) else ""))
282 is_toggled = (image_attr != "")
283
284 if not self.toggled_image:
285 if i == (len(image_names) - 1):
286 is_toggled = True
287 if is_toggled:
288 default_image_size = image_size
289 self.toggled_image = image_name
290
291 split_stuff = image_name.split('.')
292 if "rootfs" in split_stuff:
293 image_type = image_name[(len(split_stuff[0]) + len(".rootfs") + 1):]
294 else:
295 image_type = image_name[(len(split_stuff[0]) + 1):]
296
297 self.image_store.append({'name': image_name,
298 'type': image_type,
299 'size': image_size,
300 'is_toggled': is_toggled,
301 'action_attr': image_attr,})
302
303 i = i + 1
304 self.num_toggled += is_toggled
305
306 is_runnable = self.create_bottom_buttons(self.buttonlist, self.toggled_image)
307
308 # Generated image files info
309 varlist = ["Name: ", "Files created: ", "Directory: "]
310 vallist = []
311
312 vallist.append(image_name.split('.')[0])
313 vallist.append(', '.join(fileitem['type'] for fileitem in self.image_store))
314 vallist.append(image_addr)
315
316 view_files_button = HobAltButton("View files")
317 view_files_button.connect("clicked", self.view_files_clicked_cb, image_addr)
318 view_files_button.set_tooltip_text("Open the directory containing the image files")
319 open_log_button = None
320 if log_file:
321 open_log_button = HobAltButton("Open log")
322 open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
323 open_log_button.set_tooltip_text("Open the build's log file")
324 self.image_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=view_files_button, button2=open_log_button)
325 self.box_group_area.pack_start(self.image_detail, expand=False, fill=True)
326
327 # The default kernel box for the qemu images
328 self.sel_kernel = ""
329 self.kernel_detail = None
330 if 'qemu' in image_name:
331 self.sel_kernel = self.get_kernel_file_name()
332
333 # varlist = ["Kernel: "]
334 # vallist = []
335 # vallist.append(self.sel_kernel)
336
337 # change_kernel_button = HobAltButton("Change")
338 # change_kernel_button.connect("clicked", self.change_kernel_cb)
339 # change_kernel_button.set_tooltip_text("Change qemu kernel file")
340 # self.kernel_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=change_kernel_button)
341 # self.box_group_area.pack_start(self.kernel_detail, expand=True, fill=True)
342
343 # Machine, Image recipe and Layers
344 layer_num_limit = 15
345 varlist = ["Machine: ", "Image recipe: ", "Layers: "]
346 vallist = []
347 self.setting_detail = None
348 if self.build_succeeded:
349 vallist.append(machine)
350 if self.builder.recipe_model.is_custom_image():
351 if self.builder.configuration.initial_selected_image == self.builder.recipe_model.__custom_image__:
352 base_image = "New image recipe"
353 else:
354 base_image = self.builder.configuration.initial_selected_image + " (edited)"
355 vallist.append(base_image)
356 i = 0
357 for layer in layers:
358 if i > layer_num_limit:
359 break
360 varlist.append(" - ")
361 i += 1
362 vallist.append("")
363 i = 0
364 for layer in layers:
365 if i > layer_num_limit:
366 break
367 elif i == layer_num_limit:
368 vallist.append("and more...")
369 else:
370 vallist.append(layer)
371 i += 1
372
373 edit_config_button = HobAltButton("Edit configuration")
374 edit_config_button.set_tooltip_text("Edit machine and image recipe")
375 edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
376 self.setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_config_button)
377 self.box_group_area.pack_start(self.setting_detail, expand=True, fill=True)
378
379 # Packages included, and Total image size
380 varlist = ["Packages included: ", "Total image size: "]
381 vallist = []
382 vallist.append(pkg_num)
383 vallist.append(default_image_size)
384 self.builder.configuration.image_size = default_image_size
385 self.builder.configuration.image_packages = self.builder.configuration.selected_packages
386 if self.build_succeeded:
387 edit_packages_button = HobAltButton("Edit packages")
388 edit_packages_button.set_tooltip_text("Edit the packages included in your image")
389 edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
390 else: # get to this page from "My images"
391 edit_packages_button = None
392 self.package_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_packages_button)
393 self.box_group_area.pack_start(self.package_detail, expand=True, fill=True)
394
395 # pack the buttons at the bottom, at this time they are already created.
396 if self.build_succeeded:
397 self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
398 else: # for "My images" page
399 self.details_separator = gtk.HSeparator()
400 self.box_group_area.pack_start(self.details_separator, expand=False, fill=False)
401 self.box_group_area.pack_start(self.details_bottom_buttons, expand=False, fill=False)
402
403 self.show_all()
404 if self.kernel_detail and (not is_runnable):
405 self.kernel_detail.hide()
406 self.image_saved = False
407
408 def view_files_clicked_cb(self, button, image_addr):
409 subprocess.call("xdg-open /%s" % image_addr, shell=True)
410
411 def open_log_clicked_cb(self, button, log_file):
412 if log_file:
413 log_file = "file:///" + log_file
414 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
415
416 def refresh_package_detail_box(self, image_size):
417 self.package_detail.update_line_widgets("Total image size: ", image_size)
418
419 def test_type_runnable(self, image_name):
420 type_runnable = False
421 for t in self.builder.parameters.runnable_image_types:
422 if image_name.endswith(t):
423 type_runnable = True
424 break
425 return type_runnable
426
427 def test_mach_runnable(self, image_name):
428 mach_runnable = False
429 for t in self.builder.parameters.runnable_machine_patterns:
430 if t in image_name:
431 mach_runnable = True
432 break
433 return mach_runnable
434
435 def test_deployable(self, image_name):
436 if self.builder.configuration.curr_mach.startswith("qemu"):
437 return False
438 deployable = False
439 for t in self.builder.parameters.deployable_image_types:
440 if image_name.endswith(t):
441 deployable = True
442 break
443 return deployable
444
445 def get_kernel_file_name(self, kernel_addr=""):
446 kernel_name = ""
447
448 if not kernel_addr:
449 kernel_addr = self.builder.parameters.image_addr
450
451 files = [f for f in os.listdir(kernel_addr) if f[0] != '.']
452 for check_file in files:
453 if check_file.endswith(".bin"):
454 name_splits = check_file.split(".")[0]
455 if self.builder.parameters.kernel_image_type in name_splits.split("-"):
456 kernel_name = check_file
457 break
458
459 return kernel_name
460
461 def show_builded_images_dialog(self, widget, primary_action=""):
462 title = primary_action if primary_action else "Your built images"
463 dialog = CrumbsDialog(title, self.builder,
464 gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
465 dialog.set_border_width(12)
466
467 label = gtk.Label()
468 label.set_use_markup(True)
469 label.set_alignment(0.0, 0.5)
470 label.set_padding(12,0)
471 if primary_action == "Run image":
472 label.set_markup("<span font_desc='12'>Select the image file you want to run:</span>")
473 elif primary_action == "Deploy image":
474 label.set_markup("<span font_desc='12'>Select the image file you want to deploy:</span>")
475 else:
476 label.set_markup("<span font_desc='12'>Select the image file you want to %s</span>" % primary_action)
477 dialog.vbox.pack_start(label, expand=False, fill=False)
478
479 # filter the created images by their action attribute (deploy or run)
480 action_attr = ""
481 action_images = []
482 for fileitem in self.image_store:
483 action_attr = fileitem['action_attr']
484 if (action_attr == 'run' and primary_action == "Run image") \
485 or (action_attr == 'deploy' and primary_action == "Deploy image"):
486 action_images.append(fileitem)
487
488 # pack the corresponding 'runnable' or 'deploy' radio buttons when there is more than one file.
489 # by design a single build result is assumed never to contain both 'deploy' and 'runnable'
490 # files at the same time.
491 curr_row = 0
492 rows = (len(action_images)) if len(action_images) < 10 else 10
493 table = gtk.Table(rows, 10, True)
494 table.set_row_spacings(6)
495 table.set_col_spacing(0, 12)
496 table.set_col_spacing(5, 12)
497
498 sel_parent_btn = None
499 for fileitem in action_images:
500 sel_btn = gtk.RadioButton(sel_parent_btn, fileitem['type'])
501 sel_parent_btn = sel_btn if not sel_parent_btn else sel_parent_btn
502 sel_btn.set_active(fileitem['is_toggled'])
503 sel_btn.connect('toggled', self.table_selected_cb, fileitem)
504 if curr_row < 10:
505 table.attach(sel_btn, 0, 4, curr_row, curr_row + 1, xpadding=24)
506 else:
507 table.attach(sel_btn, 5, 9, curr_row - 10, curr_row - 9, xpadding=24)
508 curr_row += 1
509
510 dialog.vbox.pack_start(table, expand=False, fill=False, padding=6)
511
512 button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
513 HobAltButton.style_button(button)
514
515 if primary_action:
516 button = dialog.add_button(primary_action, gtk.RESPONSE_YES)
517 HobButton.style_button(button)
518
519 dialog.show_all()
520
521 response = dialog.run()
522 dialog.destroy()
523
524 if response != gtk.RESPONSE_YES:
525 return
526
527 for fileitem in self.image_store:
528 if fileitem['is_toggled']:
529 if fileitem['action_attr'] == 'run':
530 self.builder.runqemu_image(fileitem['name'], self.sel_kernel)
531 elif fileitem['action_attr'] == 'deploy':
532 self.builder.deploy_image(fileitem['name'])
533
534 def table_selected_cb(self, tbutton, image):
535 image['is_toggled'] = tbutton.get_active()
536 if image['is_toggled']:
537 self.toggled_image = image['name']
538
539 def change_kernel_cb(self, widget):
540 kernel_path = self.builder.show_load_kernel_dialog()
541 if kernel_path and self.kernel_detail:
542 import os.path
543 self.sel_kernel = os.path.basename(kernel_path)
544 markup = self.kernel_detail.format_line("Kernel: ", self.sel_kernel)
545 label = ((self.kernel_detail.get_children()[0]).get_children()[0]).get_children()[0]
546 label.set_markup(markup)
547
548 def create_bottom_buttons(self, buttonlist, image_name):
549 # Create the buttons at the bottom
550 created = False
551 packed = False
552 self.button_ids = {}
553 is_runnable = False
554
555 # create button "Deploy image"
556 name = "Deploy image"
557 if name in buttonlist and self.test_deployable(image_name):
558 deploy_button = HobButton('Deploy image')
559 #deploy_button.set_size_request(205, 49)
560 deploy_button.set_tooltip_text("Burn a live image to a USB drive or flash memory")
561 deploy_button.set_flags(gtk.CAN_DEFAULT)
562 button_id = deploy_button.connect("clicked", self.deploy_button_clicked_cb)
563 self.button_ids[button_id] = deploy_button
564 self.details_bottom_buttons.pack_end(deploy_button, expand=False, fill=False)
565 created = True
566 packed = True
567
568 name = "Run image"
569 if name in buttonlist and self.test_type_runnable(image_name) and self.test_mach_runnable(image_name):
570 if created:
571 # separator
572 #label = gtk.Label(" or ")
573 #self.details_bottom_buttons.pack_end(label, expand=False, fill=False)
574
575 # create button "Run image"
576 run_button = HobAltButton("Run image")
577 else:
578 # create button "Run image" as the primary button
579 run_button = HobButton("Run image")
580 #run_button.set_size_request(205, 49)
581 run_button.set_flags(gtk.CAN_DEFAULT)
582 packed = True
583 run_button.set_tooltip_text("Start up an image with the QEMU emulator")
584 button_id = run_button.connect("clicked", self.run_button_clicked_cb)
585 self.button_ids[button_id] = run_button
586 self.details_bottom_buttons.pack_end(run_button, expand=False, fill=False)
587 created = True
588 is_runnable = True
589
590 name = "Save image recipe"
591 if name in buttonlist and self.builder.recipe_model.is_custom_image():
592 save_button = HobAltButton("Save image recipe")
593 save_button.set_tooltip_text("Keep your changes by saving them as an image recipe")
594 save_button.set_sensitive(not self.image_saved)
595 button_id = save_button.connect("clicked", self.save_button_clicked_cb)
596 self.button_ids[button_id] = save_button
597 self.details_bottom_buttons.pack_end(save_button, expand=False, fill=False)
598
599 name = "Build new image"
600 if name in buttonlist:
601 # create button "Build new image"
602 if packed:
603 build_new_button = HobAltButton("Build new image")
604 else:
605 build_new_button = HobButton("Build new image")
606 build_new_button.set_flags(gtk.CAN_DEFAULT)
607 #build_new_button.set_size_request(205, 49)
608 self.details_bottom_buttons.pack_end(build_new_button, expand=False, fill=False)
609 build_new_button.set_tooltip_text("Create a new image from scratch")
610 button_id = build_new_button.connect("clicked", self.build_new_button_clicked_cb)
611 self.button_ids[button_id] = build_new_button
612
613 return is_runnable
614
615 def deploy_button_clicked_cb(self, button):
616 if self.toggled_image:
617 if self.num_toggled > 1:
618 self.set_sensitive(False)
619 self.show_builded_images_dialog(None, "Deploy image")
620 self.set_sensitive(True)
621 else:
622 self.builder.deploy_image(self.toggled_image)
623
624 def run_button_clicked_cb(self, button):
625 if self.toggled_image:
626 if self.num_toggled > 1:
627 self.set_sensitive(False)
628 self.show_builded_images_dialog(None, "Run image")
629 self.set_sensitive(True)
630 else:
631 self.builder.runqemu_image(self.toggled_image, self.sel_kernel)
632
633 def save_button_clicked_cb(self, button):
634 topdir = self.builder.get_topdir()
635 images_dir = topdir + "/recipes/images/custom/"
636 self.builder.ensure_dir(images_dir)
637
638 self.name_field_template = self.builder.image_configuration_page.custom_image_selected
639 if self.name_field_template:
640 image_path = self.builder.recipe_model.pn_path[self.name_field_template]
641 image_iter = self.builder.recipe_model.get_iter(image_path)
642 self.description_field_template = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)
643 else:
644 self.name_field_template = ""
645
646 dialog = SaveImageDialog(images_dir, self.name_field_template, self.description_field_template,
647 "Save image recipe", self.builder, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
648 response = dialog.run()
649 dialog.destroy()
650
651 def build_new_button_clicked_cb(self, button):
652 self.builder.initiate_new_build_async()
653
654 def edit_config_button_clicked_cb(self, button):
655 self.builder.show_configuration()
656
657 def edit_packages_button_clicked_cb(self, button):
658 self.builder.show_packages()
659
660 def my_images_button_clicked_cb(self, button):
661 self.builder.show_load_my_images_dialog()
662
663 def settings_button_clicked_cb(self, button):
664 # Create an advanced settings dialog
665 response, settings_changed = self.builder.show_simple_settings_dialog()
666 if not response:
667 return
668 if settings_changed:
669 self.builder.reparse_post_adv_settings()
diff --git a/bitbake/lib/bb/ui/crumbs/packageselectionpage.py b/bitbake/lib/bb/ui/crumbs/packageselectionpage.py
new file mode 100755
index 0000000000..7c62b36e6b
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/packageselectionpage.py
@@ -0,0 +1,355 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
27from bb.ui.crumbs.hoblistmodel import PackageListModel
28from bb.ui.crumbs.hobpages import HobPage
29
30#
31# PackageSelectionPage
32#
33class PackageSelectionPage (HobPage):
34
35 pages = [
36 {
37 'name' : 'Included packages',
38 'tooltip' : 'The packages currently included for your image',
39 'filter' : { PackageListModel.COL_INC : [True] },
40 'search' : 'Search packages by name',
41 'searchtip' : 'Enter a package name to find it',
42 'columns' : [{
43 'col_name' : 'Package name',
44 'col_id' : PackageListModel.COL_NAME,
45 'col_style': 'text',
46 'col_min' : 100,
47 'col_max' : 300,
48 'expand' : 'True'
49 }, {
50 'col_name' : 'Size',
51 'col_id' : PackageListModel.COL_SIZE,
52 'col_style': 'text',
53 'col_min' : 100,
54 'col_max' : 300,
55 'expand' : 'True'
56 }, {
57 'col_name' : 'Recipe',
58 'col_id' : PackageListModel.COL_RCP,
59 'col_style': 'text',
60 'col_min' : 100,
61 'col_max' : 250,
62 'expand' : 'True'
63 }, {
64 'col_name' : 'Brought in by (+others)',
65 'col_id' : PackageListModel.COL_BINB,
66 'col_style': 'binb',
67 'col_min' : 100,
68 'col_max' : 350,
69 'expand' : 'True'
70 }, {
71 'col_name' : 'Included',
72 'col_id' : PackageListModel.COL_INC,
73 'col_style': 'check toggle',
74 'col_min' : 100,
75 'col_max' : 100
76 }]
77 }, {
78 'name' : 'All packages',
79 'tooltip' : 'All packages that have been built',
80 'filter' : {},
81 'search' : 'Search packages by name',
82 'searchtip' : 'Enter a package name to find it',
83 'columns' : [{
84 'col_name' : 'Package name',
85 'col_id' : PackageListModel.COL_NAME,
86 'col_style': 'text',
87 'col_min' : 100,
88 'col_max' : 400,
89 'expand' : 'True'
90 }, {
91 'col_name' : 'Size',
92 'col_id' : PackageListModel.COL_SIZE,
93 'col_style': 'text',
94 'col_min' : 100,
95 'col_max' : 500,
96 'expand' : 'True'
97 }, {
98 'col_name' : 'Recipe',
99 'col_id' : PackageListModel.COL_RCP,
100 'col_style': 'text',
101 'col_min' : 100,
102 'col_max' : 250,
103 'expand' : 'True'
104 }, {
105 'col_name' : 'Included',
106 'col_id' : PackageListModel.COL_INC,
107 'col_style': 'check toggle',
108 'col_min' : 100,
109 'col_max' : 100
110 }]
111 }
112 ]
113
114 (INCLUDED,
115 ALL) = range(2)
116
117 def __init__(self, builder):
118 super(PackageSelectionPage, self).__init__(builder, "Edit packages")
119
120 # set invisible members
121 self.recipe_model = self.builder.recipe_model
122 self.package_model = self.builder.package_model
123
124 # create visual elements
125 self.create_visual_elements()
126
127 def included_clicked_cb(self, button):
128 self.ins.set_current_page(self.INCLUDED)
129
130 def create_visual_elements(self):
131 self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
132 self.eventbox = self.add_onto_top_bar(self.label, 73)
133 self.pack_start(self.eventbox, expand=False, fill=False)
134 self.pack_start(self.group_align, expand=True, fill=True)
135
136 # set visible members
137 self.ins = HobNotebook()
138 self.tables = [] # we need to modify the table when the dialog is shown
139
140 search_names = []
141 search_tips = []
142 # append the tab
143 for page in self.pages:
144 columns = page['columns']
145 name = page['name']
146 tab = HobViewTable(columns, name)
147 search_names.append(page['search'])
148 search_tips.append(page['searchtip'])
149 filter = page['filter']
150 sort_model = self.package_model.tree_model(filter, initial=True)
151 tab.set_model(sort_model)
152 tab.connect("toggled", self.table_toggled_cb, name)
153 tab.connect("button-release-event", self.button_click_cb)
154 tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include, filter)
155 self.ins.append_page(tab, page['name'], page['tooltip'])
156 self.tables.append(tab)
157
158 self.ins.set_entry(search_names, search_tips)
159 self.ins.search.connect("changed", self.search_entry_changed)
160
161 # add all into the dialog
162 self.box_group_area.pack_start(self.ins, expand=True, fill=True)
163
164 self.button_box = gtk.HBox(False, 6)
165 self.box_group_area.pack_start(self.button_box, expand=False, fill=False)
166
167 self.build_image_button = HobButton('Build image')
168 #self.build_image_button.set_size_request(205, 49)
169 self.build_image_button.set_tooltip_text("Build target image")
170 self.build_image_button.set_flags(gtk.CAN_DEFAULT)
171 self.build_image_button.grab_default()
172 self.build_image_button.connect("clicked", self.build_image_clicked_cb)
173 self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
174
175 self.back_button = HobAltButton('Cancel')
176 self.back_button.connect("clicked", self.back_button_clicked_cb)
177 self.button_box.pack_end(self.back_button, expand=False, fill=False)
178
179 def search_entry_changed(self, entry):
180 text = entry.get_text()
181 if self.ins.search_focus:
182 self.ins.search_focus = False
183 elif self.ins.page_changed:
184 self.ins.page_changed = False
185 self.filter_search(entry)
186 elif text not in self.ins.search_names:
187 self.filter_search(entry)
188
189 def filter_search(self, entry):
190 text = entry.get_text()
191 current_tab = self.ins.get_current_page()
192 filter = self.pages[current_tab]['filter']
193 filter[PackageListModel.COL_NAME] = text
194 self.tables[current_tab].set_model(self.package_model.tree_model(filter, search_data=text))
195 if self.package_model.filtered_nb == 0:
196 if not self.ins.get_nth_page(current_tab).top_bar:
197 self.ins.get_nth_page(current_tab).add_no_result_bar(entry)
198 self.ins.get_nth_page(current_tab).top_bar.set_no_show_all(True)
199 self.ins.get_nth_page(current_tab).top_bar.show()
200 self.ins.get_nth_page(current_tab).scroll.hide()
201 else:
202 if self.ins.get_nth_page(current_tab).top_bar:
203 self.ins.get_nth_page(current_tab).top_bar.hide()
204 self.ins.get_nth_page(current_tab).scroll.show()
205 if entry.get_text() == '':
206 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
207 else:
208 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, True)
209
210 def button_click_cb(self, widget, event):
211 path, col = widget.table_tree.get_cursor()
212 tree_model = widget.table_tree.get_model()
213 if path and col.get_title() != 'Included': # else activation is likely a removal
214 properties = {'binb': '' , 'name': '', 'size':'', 'recipe':'', 'files_list':''}
215 properties['binb'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_BINB)
216 properties['name'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_NAME)
217 properties['size'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_SIZE)
218 properties['recipe'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_RCP)
219 properties['files_list'] = tree_model.get_value(tree_model.get_iter(path), PackageListModel.COL_FLIST)
220
221 self.builder.show_recipe_property_dialog(properties)
222
223 def open_log_clicked_cb(self, button, log_file):
224 if log_file:
225 log_file = "file:///" + log_file
226 gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)
227
228 def show_page(self, log_file):
229 children = self.button_box.get_children() or []
230 for child in children:
231 self.button_box.remove(child)
232 # re-pack the buttons as required, adding the 'open log' button if the build succeeded
233 self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
234 if log_file:
235 open_log_button = HobAltButton("Open log")
236 open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
237 open_log_button.set_tooltip_text("Open the build's log file")
238 self.button_box.pack_end(open_log_button, expand=False, fill=False)
239 self.button_box.pack_end(self.back_button, expand=False, fill=False)
240 self.show_all()
241
242 def build_image_clicked_cb(self, button):
243 self.builder.parsing_warnings = []
244 self.builder.build_image()
245
246 def refresh_tables(self):
247 self.ins.reset_entry(self.ins.search, 0)
248 for tab in self.tables:
249 index = self.tables.index(tab)
250 filter = self.pages[index]['filter']
251 tab.set_model(self.package_model.tree_model(filter, initial=True))
252
253 def back_button_clicked_cb(self, button):
254 if self.builder.previous_step == self.builder.IMAGE_GENERATED:
255 self.builder.restore_initial_selected_packages()
256 self.refresh_selection()
257 self.builder.show_image_details()
258 else:
259 self.builder.show_configuration()
260 self.refresh_tables()
261
262 def refresh_selection(self):
263 self.builder.configuration.selected_packages = self.package_model.get_selected_packages()
264 self.builder.configuration.user_selected_packages = self.package_model.get_user_selected_packages()
265 selected_packages_num = len(self.builder.configuration.selected_packages)
266 selected_packages_size = self.package_model.get_packages_size()
267 selected_packages_size_str = HobPage._size_to_string(selected_packages_size)
268
269 if self.builder.configuration.image_packages == self.builder.configuration.selected_packages:
270 image_total_size_str = self.builder.configuration.image_size
271 else:
272 image_overhead_factor = self.builder.configuration.image_overhead_factor
273 image_rootfs_size = self.builder.configuration.image_rootfs_size / 1024 # image_rootfs_size is in KB
274 image_extra_size = self.builder.configuration.image_extra_size / 1024 # image_extra_size is in KB
275 base_size = image_overhead_factor * selected_packages_size
276 image_total_size = max(base_size, image_rootfs_size) + image_extra_size
277 if "zypper" in self.builder.configuration.selected_packages:
278 image_total_size += (51200 * 1024)
279 image_total_size_str = HobPage._size_to_string(image_total_size)
280
281 self.label.set_label("Packages included: %s\nSelected packages size: %s\nEstimated image size: %s" %
282 (selected_packages_num, selected_packages_size_str, image_total_size_str))
283 self.ins.show_indicator_icon("Included packages", selected_packages_num)
284
285 def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
286 if not self.package_model.path_included(path):
287 self.package_model.include_item(item_path=path, binb="User Selected")
288 else:
289 self.pre_fadeout_checkout_include(view_tree)
290 self.package_model.exclude_item(item_path=path)
291 self.render_fadeout(view_tree, cell)
292
293 self.refresh_selection()
294 if not self.builder.customized:
295 self.builder.customized = True
296 self.builder.set_base_image()
297 self.builder.configuration.selected_image = self.recipe_model.__custom_image__
298 self.builder.rcppkglist_populated()
299
300 self.builder.window_sensitive(True)
301 view_model = view_tree.get_model()
302 vpath = self.package_model.convert_path_to_vpath(view_model, path)
303 view_tree.set_cursor(vpath)
304
305 def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
306 # Click to include a package
307 self.builder.window_sensitive(False)
308 view_model = view_tree.get_model()
309 path = self.package_model.convert_vpath_to_path(view_model, view_path)
310 glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
311
312 def pre_fadeout_checkout_include(self, tree):
313 #after the fadeout the table will be sorted as before
314 self.sort_column_id = self.package_model.sort_column_id
315 self.sort_order = self.package_model.sort_order
316
317 self.package_model.resync_fadeout_column(self.package_model.get_iter_first())
318 # Check out a model based on the COL_FADE_INC column,
319 # which saves the previous state of COL_INC before exclude_item runs
320 filter = { PackageListModel.COL_FADE_INC : [True]}
321 new_model = self.package_model.tree_model(filter, excluded_items_ahead=True)
322 tree.set_model(new_model)
323 tree.expand_all()
324
325 def get_excluded_rows(self, to_render_cells, model, it):
326 while it:
327 path = model.get_path(it)
328 prev_cell_is_active = model.get_value(it, PackageListModel.COL_FADE_INC)
329 curr_cell_is_active = model.get_value(it, PackageListModel.COL_INC)
330 if (prev_cell_is_active == True) and (curr_cell_is_active == False):
331 to_render_cells.append(path)
332 if model.iter_has_child(it):
333 self.get_excluded_rows(to_render_cells, model, model.iter_children(it))
334 it = model.iter_next(it)
335
336 return to_render_cells
337
338 def render_fadeout(self, tree, cell):
339 if (not cell) or (not tree):
340 return
341 to_render_cells = []
342 view_model = tree.get_model()
343 self.get_excluded_rows(to_render_cells, view_model, view_model.get_iter_first())
344
345 cell.fadeout(tree, 1000, to_render_cells)
346
347 def after_fadeout_checkin_include(self, table, ctrl, cell, tree, filter):
348 self.package_model.sort_column_id = self.sort_column_id
349 self.package_model.sort_order = self.sort_order
350 tree.set_model(self.package_model.tree_model(filter))
351 tree.expand_all()
352
353 def set_packages_curr_tab(self, curr_page):
354 self.ins.set_current_page(curr_page)
355
diff --git a/bitbake/lib/bb/ui/crumbs/persistenttooltip.py b/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
new file mode 100644
index 0000000000..927c194292
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/persistenttooltip.py
@@ -0,0 +1,186 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# Authored by Joshua Lock <josh@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gobject
22import gtk
23try:
24 import gconf
25 except ImportError:
26 pass
27
28class PersistentTooltip(gtk.Window):
29 """
30 A tooltip which persists once shown until the user dismisses it with the Esc
31 key or by clicking the close button.
32
33 # FIXME: the PersistentTooltip should be disabled when the user clicks anywhere off
34 # it. We can't do this with focus-out-event because modal ensures we have focus?
35
36 markup: some Pango text markup to display in the tooltip
37 """
38 def __init__(self, markup, parent_win=None):
39 gtk.Window.__init__(self, gtk.WINDOW_POPUP)
40
41 # Inherit the system theme for a tooltip
42 style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
43 'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
44 self.set_style(style)
45
46 # The placement of the close button on the tip should reflect how the
47 # window manager of the user's system places close buttons. Try to read
48 # the metacity gconf key to determine whether the close button is on the
49 # left or the right.
50 # In the case that we can't determine the user's configuration we default
51 # to close buttons being on the right.
52 __button_right = True
53 try:
54 client = gconf.client_get_default()
55 order = client.get_string("/apps/metacity/general/button_layout")
56 if order and order.endswith(":"):
57 __button_right = False
58 except NameError:
59 pass
60
61 # We need to ensure we're only shown once
62 self.shown = False
63
64 # We don't want any WM decorations
65 self.set_decorated(False)
66 # We don't want to show in the taskbar or window switcher
67 self.set_skip_pager_hint(True)
68 self.set_skip_taskbar_hint(True)
69 # We must be modal to ensure we grab focus when presented from a gtk.Dialog
70 self.set_modal(True)
71
72 self.set_border_width(0)
73 self.set_position(gtk.WIN_POS_MOUSE)
74 self.set_opacity(0.95)
75
76 # Ensure a reasonable minimum size
77 self.set_geometry_hints(self, 100, 50)
78
79 # Set this window as a transient window for the parent (main window)
80 if parent_win:
81 self.set_transient_for(parent_win)
82 self.set_destroy_with_parent(True)
83 # Draw our label and close buttons
84 hbox = gtk.HBox(False, 0)
85 hbox.show()
86 self.add(hbox)
87
88 img = gtk.Image()
89 img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
90
91 self.button = gtk.Button()
92 self.button.set_image(img)
93 self.button.connect("clicked", self._dismiss_cb)
94 self.button.set_flags(gtk.CAN_DEFAULT)
95 self.button.grab_focus()
96 self.button.show()
97 vbox = gtk.VBox(False, 0)
98 vbox.show()
99 vbox.pack_start(self.button, False, False, 0)
100 if __button_right:
101 hbox.pack_end(vbox, True, True, 0)
102 else:
103 hbox.pack_start(vbox, True, True, 0)
104
105 self.set_default(self.button)
106
107 bin = gtk.HBox(True, 6)
108 bin.set_border_width(6)
109 bin.show()
110 self.label = gtk.Label()
111 self.label.set_line_wrap(True)
112 # We want to match the colours of the normal tooltips, as dictated by
113 # the user's gtk+-2.0 theme, wherever possible - on some systems this
114 # requires explicitly setting a fg_color for the label which matches the
115 # tooltip_fg_color
116 settings = gtk.settings_get_default()
117 colours = settings.get_property('gtk-color-scheme').split('\n')
118 # remove any empty lines, there's likely to be a trailing one after
119 # calling split on a dictionary-like string
120 colours = filter(None, colours)
121 for col in colours:
122 item, val = col.split(': ')
123 if item == 'tooltip_fg_color':
124 style = self.label.get_style()
125 style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
126 self.label.set_style(style)
127 break # we only care for the tooltip_fg_color
128
129 self.label.set_markup(markup)
130 self.label.show()
131 bin.add(self.label)
132 hbox.pack_end(bin, True, True, 6)
133
134 # add the original URL display for user reference
135 if 'a href' in markup:
136 hbox.set_tooltip_text(self.get_markup_url(markup))
137 hbox.show()
138
139 self.connect("key-press-event", self._catch_esc_cb)
140
141 """
142 Callback when the PersistentTooltip's close button is clicked.
143 Hides the PersistentTooltip.
144 """
145 def _dismiss_cb(self, button):
146 self.hide()
147 return True
148
149 """
150 Callback when the Esc key is detected. Hides the PersistentTooltip.
151 """
152 def _catch_esc_cb(self, widget, event):
153 keyname = gtk.gdk.keyval_name(event.keyval)
154 if keyname == "Escape":
155 self.hide()
156 return True
157
158 """
159 Called to present the PersistentTooltip.
160 Overrides the superclass's show() method to include state tracking.
161 """
162 def show(self):
163 if not self.shown:
164 self.shown = True
165 gtk.Window.show(self)
166
167 """
168 Called to hide the PersistentTooltip.
169 Overrides the superclass's hide() method to include state tracking.
170 """
171 def hide(self):
172 self.shown = False
173 gtk.Window.hide(self)
174
175 """
176 Called to get the hyperlink URL from markup text.
177 """
178 def get_markup_url(self, markup):
179 url = "http:"
180 if markup and type(markup) == str:
181 s = markup
182 if 'http:' in s:
183 import re
184 url = re.search('(http:[^,\\ "]+)', s).group(0)
185
186 return url
diff --git a/bitbake/lib/bb/ui/crumbs/progress.py b/bitbake/lib/bb/ui/crumbs/progress.py
new file mode 100644
index 0000000000..1d28a111b3
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/progress.py
@@ -0,0 +1,23 @@
1import gtk
2
3class ProgressBar(gtk.Dialog):
4 def __init__(self, parent):
5
6 gtk.Dialog.__init__(self, flags=(gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT))
7 self.set_title("Parsing metadata, please wait...")
8 self.set_default_size(500, 0)
9 self.set_transient_for(parent)
10 self.progress = gtk.ProgressBar()
11 self.vbox.pack_start(self.progress)
12 self.show_all()
13
14 def set_text(self, msg):
15 self.progress.set_text(msg)
16
17 def update(self, x, y):
18 self.progress.set_fraction(float(x)/float(y))
19 self.progress.set_text("%2d %%" % (x*100/y))
20
21 def pulse(self):
22 self.progress.set_text("Loading...")
23 self.progress.pulse()
diff --git a/bitbake/lib/bb/ui/crumbs/progressbar.py b/bitbake/lib/bb/ui/crumbs/progressbar.py
new file mode 100644
index 0000000000..3e2c660e4a
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/progressbar.py
@@ -0,0 +1,59 @@
1# BitBake Graphical GTK User Interface
2#
3# Copyright (C) 2011 Intel Corporation
4#
5# Authored by Shane Wang <shane.wang@intel.com>
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import gtk
21from bb.ui.crumbs.hobcolor import HobColors
22
23class HobProgressBar (gtk.ProgressBar):
24 def __init__(self):
25 gtk.ProgressBar.__init__(self)
26 self.set_rcstyle(True)
27 self.percentage = 0
28
29 def set_rcstyle(self, status):
30 rcstyle = gtk.RcStyle()
31 rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
32 if status == "stop":
33 rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
34 elif status == "fail":
35 rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
36 else:
37 rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
38 self.modify_style(rcstyle)
39
40 def set_title(self, text=None):
41 if not text:
42 text = ""
43 text += " %.0f%%" % self.percentage
44 self.set_text(text)
45
46 def set_stop_title(self, text=None):
47 if not text:
48 text = ""
49 self.set_text(text)
50
51 def reset(self):
52 self.set_fraction(0)
53 self.set_text("")
54 self.set_rcstyle(True)
55 self.percentage = 0
56
57 def update(self, fraction):
58 self.percentage = int(fraction * 100)
59 self.set_fraction(fraction)
diff --git a/bitbake/lib/bb/ui/crumbs/puccho.glade b/bitbake/lib/bb/ui/crumbs/puccho.glade
new file mode 100644
index 0000000000..d7553a6e14
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/puccho.glade
@@ -0,0 +1,606 @@
1<?xml version="1.0" encoding="UTF-8" standalone="no"?>
2<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
3<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
4<glade-interface>
5 <widget class="GtkDialog" id="build_dialog">
6 <property name="title" translatable="yes">Start a build</property>
7 <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
8 <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
9 <property name="has_separator">False</property>
10 <child internal-child="vbox">
11 <widget class="GtkVBox" id="dialog-vbox1">
12 <property name="visible">True</property>
13 <property name="spacing">2</property>
14 <child>
15 <widget class="GtkTable" id="build_table">
16 <property name="visible">True</property>
17 <property name="border_width">6</property>
18 <property name="n_rows">7</property>
19 <property name="n_columns">3</property>
20 <property name="column_spacing">5</property>
21 <property name="row_spacing">6</property>
22 <child>
23 <widget class="GtkAlignment" id="status_alignment">
24 <property name="visible">True</property>
25 <property name="left_padding">12</property>
26 <child>
27 <widget class="GtkHBox" id="status_hbox">
28 <property name="spacing">6</property>
29 <child>
30 <widget class="GtkImage" id="status_image">
31 <property name="visible">True</property>
32 <property name="no_show_all">True</property>
33 <property name="xalign">0</property>
34 <property name="stock">gtk-dialog-error</property>
35 </widget>
36 <packing>
37 <property name="expand">False</property>
38 <property name="fill">False</property>
39 </packing>
40 </child>
41 <child>
42 <widget class="GtkLabel" id="status_label">
43 <property name="visible">True</property>
44 <property name="xalign">0</property>
45 <property name="label" translatable="yes">If you see this text something is wrong...</property>
46 <property name="use_markup">True</property>
47 <property name="use_underline">True</property>
48 </widget>
49 <packing>
50 <property name="position">1</property>
51 </packing>
52 </child>
53 </widget>
54 </child>
55 </widget>
56 <packing>
57 <property name="right_attach">3</property>
58 <property name="top_attach">2</property>
59 <property name="bottom_attach">3</property>
60 </packing>
61 </child>
62 <child>
63 <widget class="GtkLabel" id="label2">
64 <property name="visible">True</property>
65 <property name="xalign">0</property>
66 <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
67 <property name="use_markup">True</property>
68 </widget>
69 <packing>
70 <property name="right_attach">3</property>
71 <property name="top_attach">3</property>
72 <property name="bottom_attach">4</property>
73 <property name="y_options"></property>
74 </packing>
75 </child>
76 <child>
77 <widget class="GtkComboBox" id="image_combo">
78 <property name="visible">True</property>
79 <property name="sensitive">False</property>
80 </widget>
81 <packing>
82 <property name="left_attach">1</property>
83 <property name="right_attach">2</property>
84 <property name="top_attach">6</property>
85 <property name="bottom_attach">7</property>
86 <property name="y_options"></property>
87 </packing>
88 </child>
89 <child>
90 <widget class="GtkLabel" id="image_label">
91 <property name="visible">True</property>
92 <property name="sensitive">False</property>
93 <property name="xalign">0</property>
94 <property name="xpad">12</property>
95 <property name="label" translatable="yes">Image:</property>
96 </widget>
97 <packing>
98 <property name="top_attach">6</property>
99 <property name="bottom_attach">7</property>
100 <property name="y_options"></property>
101 </packing>
102 </child>
103 <child>
104 <widget class="GtkComboBox" id="distribution_combo">
105 <property name="visible">True</property>
106 <property name="sensitive">False</property>
107 </widget>
108 <packing>
109 <property name="left_attach">1</property>
110 <property name="right_attach">2</property>
111 <property name="top_attach">5</property>
112 <property name="bottom_attach">6</property>
113 <property name="y_options"></property>
114 </packing>
115 </child>
116 <child>
117 <widget class="GtkLabel" id="distribution_label">
118 <property name="visible">True</property>
119 <property name="sensitive">False</property>
120 <property name="xalign">0</property>
121 <property name="xpad">12</property>
122 <property name="label" translatable="yes">Distribution:</property>
123 </widget>
124 <packing>
125 <property name="top_attach">5</property>
126 <property name="bottom_attach">6</property>
127 <property name="y_options"></property>
128 </packing>
129 </child>
130 <child>
131 <widget class="GtkComboBox" id="machine_combo">
132 <property name="visible">True</property>
133 <property name="sensitive">False</property>
134 </widget>
135 <packing>
136 <property name="left_attach">1</property>
137 <property name="right_attach">2</property>
138 <property name="top_attach">4</property>
139 <property name="bottom_attach">5</property>
140 <property name="y_options"></property>
141 </packing>
142 </child>
143 <child>
144 <widget class="GtkLabel" id="machine_label">
145 <property name="visible">True</property>
146 <property name="sensitive">False</property>
147 <property name="xalign">0</property>
148 <property name="xpad">12</property>
149 <property name="label" translatable="yes">Machine:</property>
150 </widget>
151 <packing>
152 <property name="top_attach">4</property>
153 <property name="bottom_attach">5</property>
154 <property name="y_options"></property>
155 </packing>
156 </child>
157 <child>
158 <widget class="GtkButton" id="refresh_button">
159 <property name="visible">True</property>
160 <property name="sensitive">False</property>
161 <property name="can_focus">True</property>
162 <property name="receives_default">True</property>
163 <property name="label" translatable="yes">gtk-refresh</property>
164 <property name="use_stock">True</property>
165 <property name="response_id">0</property>
166 </widget>
167 <packing>
168 <property name="left_attach">2</property>
169 <property name="right_attach">3</property>
170 <property name="top_attach">1</property>
171 <property name="bottom_attach">2</property>
172 <property name="y_options"></property>
173 </packing>
174 </child>
175 <child>
176 <widget class="GtkEntry" id="location_entry">
177 <property name="visible">True</property>
178 <property name="can_focus">True</property>
179 <property name="width_chars">32</property>
180 </widget>
181 <packing>
182 <property name="left_attach">1</property>
183 <property name="right_attach">2</property>
184 <property name="top_attach">1</property>
185 <property name="bottom_attach">2</property>
186 <property name="y_options"></property>
187 </packing>
188 </child>
189 <child>
190 <widget class="GtkLabel" id="label3">
191 <property name="visible">True</property>
192 <property name="xalign">0</property>
193 <property name="xpad">12</property>
194 <property name="label" translatable="yes">Location:</property>
195 </widget>
196 <packing>
197 <property name="top_attach">1</property>
198 <property name="bottom_attach">2</property>
199 <property name="y_options"></property>
200 </packing>
201 </child>
202 <child>
203 <widget class="GtkLabel" id="label1">
204 <property name="visible">True</property>
205 <property name="xalign">0</property>
206 <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
207 <property name="use_markup">True</property>
208 </widget>
209 <packing>
210 <property name="right_attach">3</property>
211 <property name="y_options"></property>
212 </packing>
213 </child>
214 <child>
215 <widget class="GtkAlignment" id="alignment1">
216 <property name="visible">True</property>
217 <child>
218 <placeholder/>
219 </child>
220 </widget>
221 <packing>
222 <property name="left_attach">2</property>
223 <property name="right_attach">3</property>
224 <property name="top_attach">4</property>
225 <property name="bottom_attach">5</property>
226 <property name="y_options"></property>
227 </packing>
228 </child>
229 <child>
230 <widget class="GtkAlignment" id="alignment2">
231 <property name="visible">True</property>
232 <child>
233 <placeholder/>
234 </child>
235 </widget>
236 <packing>
237 <property name="left_attach">2</property>
238 <property name="right_attach">3</property>
239 <property name="top_attach">5</property>
240 <property name="bottom_attach">6</property>
241 <property name="y_options"></property>
242 </packing>
243 </child>
244 <child>
245 <widget class="GtkAlignment" id="alignment3">
246 <property name="visible">True</property>
247 <child>
248 <placeholder/>
249 </child>
250 </widget>
251 <packing>
252 <property name="left_attach">2</property>
253 <property name="right_attach">3</property>
254 <property name="top_attach">6</property>
255 <property name="bottom_attach">7</property>
256 <property name="y_options"></property>
257 </packing>
258 </child>
259 </widget>
260 <packing>
261 <property name="position">1</property>
262 </packing>
263 </child>
264 <child internal-child="action_area">
265 <widget class="GtkHButtonBox" id="dialog-action_area1">
266 <property name="visible">True</property>
267 <property name="layout_style">GTK_BUTTONBOX_END</property>
268 <child>
269 <placeholder/>
270 </child>
271 <child>
272 <placeholder/>
273 </child>
274 <child>
275 <placeholder/>
276 </child>
277 </widget>
278 <packing>
279 <property name="expand">False</property>
280 <property name="pack_type">GTK_PACK_END</property>
281 </packing>
282 </child>
283 </widget>
284 </child>
285 </widget>
286 <widget class="GtkDialog" id="dialog2">
287 <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
288 <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
289 <property name="has_separator">False</property>
290 <child internal-child="vbox">
291 <widget class="GtkVBox" id="dialog-vbox2">
292 <property name="visible">True</property>
293 <property name="spacing">2</property>
294 <child>
295 <widget class="GtkTable" id="table2">
296 <property name="visible">True</property>
297 <property name="border_width">6</property>
298 <property name="n_rows">7</property>
299 <property name="n_columns">3</property>
300 <property name="column_spacing">6</property>
301 <property name="row_spacing">6</property>
302 <child>
303 <widget class="GtkLabel" id="label7">
304 <property name="visible">True</property>
305 <property name="xalign">0</property>
306 <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
307 <property name="use_markup">True</property>
308 </widget>
309 <packing>
310 <property name="right_attach">3</property>
311 <property name="y_options"></property>
312 </packing>
313 </child>
314 <child>
315 <widget class="GtkAlignment" id="alignment4">
316 <property name="visible">True</property>
317 <property name="xalign">0</property>
318 <property name="left_padding">12</property>
319 <child>
320 <widget class="GtkScrolledWindow" id="scrolledwindow1">
321 <property name="visible">True</property>
322 <property name="can_focus">True</property>
323 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
324 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
325 <child>
326 <widget class="GtkTreeView" id="treeview1">
327 <property name="visible">True</property>
328 <property name="can_focus">True</property>
329 <property name="headers_clickable">True</property>
330 </widget>
331 </child>
332 </widget>
333 </child>
334 </widget>
335 <packing>
336 <property name="right_attach">3</property>
337 <property name="top_attach">2</property>
338 <property name="bottom_attach">3</property>
339 <property name="y_options"></property>
340 </packing>
341 </child>
342 <child>
343 <widget class="GtkEntry" id="entry1">
344 <property name="visible">True</property>
345 <property name="can_focus">True</property>
346 </widget>
347 <packing>
348 <property name="left_attach">1</property>
349 <property name="right_attach">3</property>
350 <property name="top_attach">1</property>
351 <property name="bottom_attach">2</property>
352 <property name="y_options"></property>
353 </packing>
354 </child>
355 <child>
356 <widget class="GtkLabel" id="label9">
357 <property name="visible">True</property>
358 <property name="xalign">0</property>
359 <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
360 <property name="use_markup">True</property>
361 </widget>
362 <packing>
363 <property name="right_attach">3</property>
364 <property name="top_attach">4</property>
365 <property name="bottom_attach">5</property>
366 <property name="y_options"></property>
367 </packing>
368 </child>
369 <child>
370 <widget class="GtkAlignment" id="alignment6">
371 <property name="visible">True</property>
372 <property name="xalign">0</property>
373 <property name="xscale">0</property>
374 <child>
375 <widget class="GtkLabel" id="label8">
376 <property name="visible">True</property>
377 <property name="xalign">0</property>
378 <property name="yalign">0</property>
379 <property name="xpad">12</property>
380 <property name="label" translatable="yes">Location: </property>
381 </widget>
382 </child>
383 </widget>
384 <packing>
385 <property name="top_attach">1</property>
386 <property name="bottom_attach">2</property>
387 <property name="y_options"></property>
388 </packing>
389 </child>
390 <child>
391 <widget class="GtkAlignment" id="alignment7">
392 <property name="visible">True</property>
393 <property name="xalign">1</property>
394 <property name="xscale">0</property>
395 <child>
396 <widget class="GtkHButtonBox" id="hbuttonbox1">
397 <property name="visible">True</property>
398 <property name="spacing">5</property>
399 <child>
400 <widget class="GtkButton" id="button7">
401 <property name="visible">True</property>
402 <property name="can_focus">True</property>
403 <property name="receives_default">True</property>
404 <property name="label" translatable="yes">gtk-remove</property>
405 <property name="use_stock">True</property>
406 <property name="response_id">0</property>
407 </widget>
408 </child>
409 <child>
410 <widget class="GtkButton" id="button6">
411 <property name="visible">True</property>
412 <property name="can_focus">True</property>
413 <property name="receives_default">True</property>
414 <property name="label" translatable="yes">gtk-edit</property>
415 <property name="use_stock">True</property>
416 <property name="response_id">0</property>
417 </widget>
418 <packing>
419 <property name="position">1</property>
420 </packing>
421 </child>
422 <child>
423 <widget class="GtkButton" id="button5">
424 <property name="visible">True</property>
425 <property name="can_focus">True</property>
426 <property name="receives_default">True</property>
427 <property name="label" translatable="yes">gtk-add</property>
428 <property name="use_stock">True</property>
429 <property name="response_id">0</property>
430 </widget>
431 <packing>
432 <property name="position">2</property>
433 </packing>
434 </child>
435 </widget>
436 </child>
437 </widget>
438 <packing>
439 <property name="left_attach">1</property>
440 <property name="right_attach">3</property>
441 <property name="top_attach">3</property>
442 <property name="bottom_attach">4</property>
443 <property name="y_options"></property>
444 </packing>
445 </child>
446 <child>
447 <widget class="GtkAlignment" id="alignment5">
448 <property name="visible">True</property>
449 <child>
450 <placeholder/>
451 </child>
452 </widget>
453 <packing>
454 <property name="top_attach">3</property>
455 <property name="bottom_attach">4</property>
456 <property name="y_options"></property>
457 </packing>
458 </child>
459 <child>
460 <widget class="GtkLabel" id="label10">
461 <property name="visible">True</property>
462 <property name="xalign">0</property>
463 <property name="yalign">0</property>
464 <property name="xpad">12</property>
465 <property name="label" translatable="yes">Search:</property>
466 </widget>
467 <packing>
468 <property name="top_attach">5</property>
469 <property name="bottom_attach">6</property>
470 <property name="y_options"></property>
471 </packing>
472 </child>
473 <child>
474 <widget class="GtkEntry" id="entry2">
475 <property name="visible">True</property>
476 <property name="can_focus">True</property>
477 </widget>
478 <packing>
479 <property name="left_attach">1</property>
480 <property name="right_attach">3</property>
481 <property name="top_attach">5</property>
482 <property name="bottom_attach">6</property>
483 <property name="y_options"></property>
484 </packing>
485 </child>
486 <child>
487 <widget class="GtkAlignment" id="alignment8">
488 <property name="visible">True</property>
489 <property name="xalign">0</property>
490 <property name="left_padding">12</property>
491 <child>
492 <widget class="GtkScrolledWindow" id="scrolledwindow2">
493 <property name="visible">True</property>
494 <property name="can_focus">True</property>
495 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
496 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
497 <child>
498 <widget class="GtkTreeView" id="treeview2">
499 <property name="visible">True</property>
500 <property name="can_focus">True</property>
501 <property name="headers_clickable">True</property>
502 </widget>
503 </child>
504 </widget>
505 </child>
506 </widget>
507 <packing>
508 <property name="right_attach">3</property>
509 <property name="top_attach">6</property>
510 <property name="bottom_attach">7</property>
511 <property name="y_options"></property>
512 </packing>
513 </child>
514 </widget>
515 <packing>
516 <property name="position">1</property>
517 </packing>
518 </child>
519 <child internal-child="action_area">
520 <widget class="GtkHButtonBox" id="dialog-action_area2">
521 <property name="visible">True</property>
522 <property name="layout_style">GTK_BUTTONBOX_END</property>
523 <child>
524 <widget class="GtkButton" id="button4">
525 <property name="visible">True</property>
526 <property name="can_focus">True</property>
527 <property name="receives_default">True</property>
528 <property name="label" translatable="yes">gtk-close</property>
529 <property name="use_stock">True</property>
530 <property name="response_id">0</property>
531 </widget>
532 </child>
533 </widget>
534 <packing>
535 <property name="expand">False</property>
536 <property name="pack_type">GTK_PACK_END</property>
537 </packing>
538 </child>
539 </widget>
540 </child>
541 </widget>
542 <widget class="GtkWindow" id="main_window">
543 <child>
544 <widget class="GtkVBox" id="main_window_vbox">
545 <property name="visible">True</property>
546 <child>
547 <widget class="GtkToolbar" id="main_toolbar">
548 <property name="visible">True</property>
549 <child>
550 <widget class="GtkToolButton" id="main_toolbutton_build">
551 <property name="visible">True</property>
552 <property name="label" translatable="yes">Build</property>
553 <property name="stock_id">gtk-execute</property>
554 </widget>
555 <packing>
556 <property name="expand">False</property>
557 </packing>
558 </child>
559 </widget>
560 <packing>
561 <property name="expand">False</property>
562 </packing>
563 </child>
564 <child>
565 <widget class="GtkVPaned" id="vpaned1">
566 <property name="visible">True</property>
567 <property name="can_focus">True</property>
568 <child>
569 <widget class="GtkScrolledWindow" id="results_scrolledwindow">
570 <property name="visible">True</property>
571 <property name="can_focus">True</property>
572 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
573 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
574 <child>
575 <placeholder/>
576 </child>
577 </widget>
578 <packing>
579 <property name="resize">False</property>
580 <property name="shrink">True</property>
581 </packing>
582 </child>
583 <child>
584 <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
585 <property name="visible">True</property>
586 <property name="can_focus">True</property>
587 <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
588 <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
589 <child>
590 <placeholder/>
591 </child>
592 </widget>
593 <packing>
594 <property name="resize">True</property>
595 <property name="shrink">True</property>
596 </packing>
597 </child>
598 </widget>
599 <packing>
600 <property name="position">1</property>
601 </packing>
602 </child>
603 </widget>
604 </child>
605 </widget>
606</glade-interface>
diff --git a/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py b/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py
new file mode 100755
index 0000000000..58db43f706
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/recipeselectionpage.py
@@ -0,0 +1,335 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
8# Authored by Shane Wang <shane.wang@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import gtk
24import glib
25from bb.ui.crumbs.hobcolor import HobColors
26from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
27from bb.ui.crumbs.hoblistmodel import RecipeListModel
28from bb.ui.crumbs.hobpages import HobPage
29
30#
31# RecipeSelectionPage
32#
33class RecipeSelectionPage (HobPage):
34 pages = [
35 {
36 'name' : 'Included recipes',
37 'tooltip' : 'The recipes currently included for your image',
38 'filter' : { RecipeListModel.COL_INC : [True],
39 RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] },
40 'search' : 'Search recipes by name',
41 'searchtip' : 'Enter a recipe name to find it',
42 'columns' : [{
43 'col_name' : 'Recipe name',
44 'col_id' : RecipeListModel.COL_NAME,
45 'col_style': 'text',
46 'col_min' : 100,
47 'col_max' : 400,
48 'expand' : 'True'
49 }, {
50 'col_name' : 'Group',
51 'col_id' : RecipeListModel.COL_GROUP,
52 'col_style': 'text',
53 'col_min' : 100,
54 'col_max' : 300,
55 'expand' : 'True'
56 }, {
57 'col_name' : 'Brought in by (+others)',
58 'col_id' : RecipeListModel.COL_BINB,
59 'col_style': 'binb',
60 'col_min' : 100,
61 'col_max' : 500,
62 'expand' : 'True'
63 }, {
64 'col_name' : 'Included',
65 'col_id' : RecipeListModel.COL_INC,
66 'col_style': 'check toggle',
67 'col_min' : 100,
68 'col_max' : 100
69 }]
70 }, {
71 'name' : 'All recipes',
72 'tooltip' : 'All recipes in your configured layers',
73 'filter' : { RecipeListModel.COL_TYPE : ['recipe'] },
74 'search' : 'Search recipes by name',
75 'searchtip' : 'Enter a recipe name to find it',
76 'columns' : [{
77 'col_name' : 'Recipe name',
78 'col_id' : RecipeListModel.COL_NAME,
79 'col_style': 'text',
80 'col_min' : 100,
81 'col_max' : 400,
82 'expand' : 'True'
83 }, {
84 'col_name' : 'Group',
85 'col_id' : RecipeListModel.COL_GROUP,
86 'col_style': 'text',
87 'col_min' : 100,
88 'col_max' : 400,
89 'expand' : 'True'
90 }, {
91 'col_name' : 'License',
92 'col_id' : RecipeListModel.COL_LIC,
93 'col_style': 'text',
94 'col_min' : 100,
95 'col_max' : 400,
96 'expand' : 'True'
97 }, {
98 'col_name' : 'Included',
99 'col_id' : RecipeListModel.COL_INC,
100 'col_style': 'check toggle',
101 'col_min' : 100,
102 'col_max' : 100
103 }]
104 }, {
105 'name' : 'Package Groups',
106 'tooltip' : 'All package groups in your configured layers',
107 'filter' : { RecipeListModel.COL_TYPE : ['packagegroup'] },
108 'search' : 'Search package groups by name',
109 'searchtip' : 'Enter a package group name to find it',
110 'columns' : [{
111 'col_name' : 'Package group name',
112 'col_id' : RecipeListModel.COL_NAME,
113 'col_style': 'text',
114 'col_min' : 100,
115 'col_max' : 400,
116 'expand' : 'True'
117 }, {
118 'col_name' : 'Included',
119 'col_id' : RecipeListModel.COL_INC,
120 'col_style': 'check toggle',
121 'col_min' : 100,
122 'col_max' : 100
123 }]
124 }
125 ]
126
127 (INCLUDED,
128 ALL,
129 TASKS) = range(3)
130
131 def __init__(self, builder = None):
132 super(RecipeSelectionPage, self).__init__(builder, "Step 1 of 2: Edit recipes")
133
134 # set invisible members
135 self.recipe_model = self.builder.recipe_model
136
137 # create visual elements
138 self.create_visual_elements()
139
140 def included_clicked_cb(self, button):
141 self.ins.set_current_page(self.INCLUDED)
142
143 def create_visual_elements(self):
144 self.eventbox = self.add_onto_top_bar(None, 73)
145 self.pack_start(self.eventbox, expand=False, fill=False)
146 self.pack_start(self.group_align, expand=True, fill=True)
147
148 # set visible members
149 self.ins = HobNotebook()
150        self.tables = [] # we may need to modify the tables when the dialog is shown
151
152 search_names = []
153 search_tips = []
154 # append the tabs in order
155 for page in self.pages:
156 columns = page['columns']
157 name = page['name']
158 tab = HobViewTable(columns, name)
159 search_names.append(page['search'])
160 search_tips.append(page['searchtip'])
161 filter = page['filter']
162 sort_model = self.recipe_model.tree_model(filter, initial=True)
163 tab.set_model(sort_model)
164 tab.connect("toggled", self.table_toggled_cb, name)
165 tab.connect("button-release-event", self.button_click_cb)
166 tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include, filter)
167 self.ins.append_page(tab, page['name'], page['tooltip'])
168 self.tables.append(tab)
169
170 self.ins.set_entry(search_names, search_tips)
171 self.ins.search.connect("changed", self.search_entry_changed)
172
173 # add all into the window
174 self.box_group_area.pack_start(self.ins, expand=True, fill=True)
175
176 button_box = gtk.HBox(False, 6)
177 self.box_group_area.pack_end(button_box, expand=False, fill=False)
178
179 self.build_packages_button = HobButton('Build packages')
180 #self.build_packages_button.set_size_request(205, 49)
181 self.build_packages_button.set_tooltip_text("Build selected recipes into packages")
182 self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
183 self.build_packages_button.grab_default()
184 self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
185 button_box.pack_end(self.build_packages_button, expand=False, fill=False)
186
187 self.back_button = HobAltButton('Cancel')
188 self.back_button.connect("clicked", self.back_button_clicked_cb)
189 button_box.pack_end(self.back_button, expand=False, fill=False)
190
191 def search_entry_changed(self, entry):
192 text = entry.get_text()
193 if self.ins.search_focus:
194 self.ins.search_focus = False
195 elif self.ins.page_changed:
196            self.ins.page_changed = False
197 self.filter_search(entry)
198 elif text not in self.ins.search_names:
199 self.filter_search(entry)
200
201 def filter_search(self, entry):
202 text = entry.get_text()
203 current_tab = self.ins.get_current_page()
204 filter = self.pages[current_tab]['filter']
205 filter[RecipeListModel.COL_NAME] = text
206 self.tables[current_tab].set_model(self.recipe_model.tree_model(filter, search_data=text))
207 if self.recipe_model.filtered_nb == 0:
208 if not self.ins.get_nth_page(current_tab).top_bar:
209 self.ins.get_nth_page(current_tab).add_no_result_bar(entry)
210 self.ins.get_nth_page(current_tab).top_bar.set_no_show_all(True)
211 self.ins.get_nth_page(current_tab).top_bar.show()
212 self.ins.get_nth_page(current_tab).scroll.hide()
213 else:
214 if self.ins.get_nth_page(current_tab).top_bar:
215 self.ins.get_nth_page(current_tab).top_bar.hide()
216 self.ins.get_nth_page(current_tab).scroll.show()
217 if entry.get_text() == '':
218 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
219 else:
220 entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, True)
221
222 def button_click_cb(self, widget, event):
223 path, col = widget.table_tree.get_cursor()
224 tree_model = widget.table_tree.get_model()
225 if path and col.get_title() != 'Included': # else activation is likely a removal
226 properties = {'summary': '', 'name': '', 'version': '', 'revision': '', 'binb': '', 'group': '', 'license': '', 'homepage': '', 'bugtracker': '', 'description': ''}
227 properties['summary'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_SUMMARY)
228 properties['name'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_NAME)
229 properties['version'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_VERSION)
230 properties['revision'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_REVISION)
231 properties['binb'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BINB)
232 properties['group'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_GROUP)
233 properties['license'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_LIC)
234 properties['homepage'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_HOMEPAGE)
235 properties['bugtracker'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BUGTRACKER)
236 properties['description'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_DESC)
237 self.builder.show_recipe_property_dialog(properties)
238
239 def build_packages_clicked_cb(self, button):
240 self.refresh_tables()
241 self.builder.build_packages()
242
243 def refresh_tables(self):
244 self.ins.reset_entry(self.ins.search, 0)
245 for tab in self.tables:
246 index = self.tables.index(tab)
247 filter = self.pages[index]['filter']
248 tab.set_model(self.recipe_model.tree_model(filter, search_data="", initial=True))
249
250 def back_button_clicked_cb(self, button):
251 self.builder.recipe_model.set_selected_image(self.builder.configuration.initial_selected_image)
252 self.builder.image_configuration_page.update_image_combo(self.builder.recipe_model, self.builder.configuration.initial_selected_image)
253 self.builder.image_configuration_page.update_image_desc()
254 self.builder.show_configuration()
255 self.refresh_tables()
256
257 def refresh_selection(self):
258 self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
259 _, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
260 self.ins.show_indicator_icon("Included recipes", len(self.builder.configuration.selected_recipes))
261
262 def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
263 if not self.recipe_model.path_included(path):
264 self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
265 else:
266 self.pre_fadeout_checkout_include(view_tree, pagename)
267 self.recipe_model.exclude_item(item_path=path)
268 self.render_fadeout(view_tree, cell)
269
270 self.refresh_selection()
271 if not self.builder.customized:
272 self.builder.customized = True
273 self.builder.configuration.selected_image = self.recipe_model.__custom_image__
274 self.builder.rcppkglist_populated()
275
276 self.builder.window_sensitive(True)
277
278 view_model = view_tree.get_model()
279 vpath = self.recipe_model.convert_path_to_vpath(view_model, path)
280 view_tree.set_cursor(vpath)
281
282 def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
283        # Handle a click that toggles a recipe's included state
284 self.builder.window_sensitive(False)
285 view_model = view_tree.get_model()
286 path = self.recipe_model.convert_vpath_to_path(view_model, view_path)
287 glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
288
289 def pre_fadeout_checkout_include(self, tree, pagename):
290 #after the fadeout the table will be sorted as before
291 self.sort_column_id = self.recipe_model.sort_column_id
292 self.sort_order = self.recipe_model.sort_order
293
294 #resync the included items to a backup fade include column
295 it = self.recipe_model.get_iter_first()
296 while it:
297 active = self.recipe_model.get_value(it, self.recipe_model.COL_INC)
298 self.recipe_model.set(it, self.recipe_model.COL_FADE_INC, active)
299 it = self.recipe_model.iter_next(it)
300        # Build a model based on the COL_FADE_INC column, which saves the
301        # previous state of COL_INC from before exclude_item() was called
302 filter = { RecipeListModel.COL_FADE_INC:[True] }
303 if pagename == "Included recipes":
304 filter[RecipeListModel.COL_TYPE] = ['recipe', 'packagegroup']
305 elif pagename == "All recipes":
306 filter[RecipeListModel.COL_TYPE] = ['recipe']
307 else:
308 filter[RecipeListModel.COL_TYPE] = ['packagegroup']
309
310 new_model = self.recipe_model.tree_model(filter, excluded_items_ahead=True)
311 tree.set_model(new_model)
312
313 def render_fadeout(self, tree, cell):
314 if (not cell) or (not tree):
315 return
316 to_render_cells = []
317 model = tree.get_model()
318 it = model.get_iter_first()
319 while it:
320 path = model.get_path(it)
321 prev_cell_is_active = model.get_value(it, RecipeListModel.COL_FADE_INC)
322 curr_cell_is_active = model.get_value(it, RecipeListModel.COL_INC)
323 if (prev_cell_is_active == True) and (curr_cell_is_active == False):
324 to_render_cells.append(path)
325 it = model.iter_next(it)
326
327 cell.fadeout(tree, 1000, to_render_cells)
328
329 def after_fadeout_checkin_include(self, table, ctrl, cell, tree, filter):
330 self.recipe_model.sort_column_id = self.sort_column_id
331 self.recipe_model.sort_order = self.sort_order
332 tree.set_model(self.recipe_model.tree_model(filter))
333
334 def set_recipe_curr_tab(self, curr_page):
335 self.ins.set_current_page(curr_page)
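
The create_visual_elements() method above is driven by the declarative 'pages' list: each entry supplies a tab name, tooltip, filter and column definitions, and the loop builds one HobViewTable per entry. A minimal sketch of the same data-driven pattern using stock PyGTK widgets (gtk.Notebook and gtk.TreeView stand in for the Hob-specific HobNotebook and HobViewTable, and the page data here is illustrative only):

import gtk

PAGES = [
    {'name': 'Included recipes', 'columns': ['Recipe name', 'Group']},
    {'name': 'All recipes',      'columns': ['Recipe name', 'License']},
]

def build_notebook(pages):
    # One tab per page description, mirroring the loop in create_visual_elements()
    notebook = gtk.Notebook()
    for page in pages:
        store = gtk.ListStore(*([str] * len(page['columns'])))
        view = gtk.TreeView(store)
        for i, title in enumerate(page['columns']):
            view.append_column(
                gtk.TreeViewColumn(title, gtk.CellRendererText(), text=i))
        notebook.append_page(view, gtk.Label(page['name']))
    return notebook

if __name__ == '__main__':
    window = gtk.Window()
    window.connect("delete-event", gtk.main_quit)
    window.add(build_notebook(PAGES))
    window.show_all()
    gtk.main()
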
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
new file mode 100644
index 0000000000..16a955d2b1
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py
@@ -0,0 +1,551 @@
1
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2008 Intel Corporation
6#
7# Authored by Rob Bradford <rob@linux.intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gtk
23import gobject
24import logging
25import os, time  # os is needed for os.path.exists() on task log files
26import urllib
27import urllib2
28import pango
29from bb.ui.crumbs.hobcolor import HobColors
30from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
31
32class RunningBuildModel (gtk.TreeStore):
33 (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
34
35 def __init__ (self):
36 gtk.TreeStore.__init__ (self,
37 gobject.TYPE_STRING,
38 gobject.TYPE_STRING,
39 gobject.TYPE_STRING,
40 gobject.TYPE_STRING,
41 gobject.TYPE_STRING,
42 gobject.TYPE_STRING,
43 gobject.TYPE_INT)
44
45 def failure_model_filter(self, model, it):
46 color = model.get(it, self.COL_COLOR)[0]
47 if not color:
48 return False
49 if color == HobColors.ERROR or color == HobColors.WARNING:
50 return True
51 return False
52
53 def failure_model(self):
54 model = self.filter_new()
55 model.set_visible_func(self.failure_model_filter)
56 return model
57
58 def foreach_cell_func(self, model, path, iter, usr_data=None):
59 if model.get_value(iter, self.COL_ICON) == "gtk-execute":
60 model.set(iter, self.COL_ICON, "")
61
62 def close_task_refresh(self):
63 self.foreach(self.foreach_cell_func, None)
64
65class RunningBuild (gobject.GObject):
66 __gsignals__ = {
67 'build-started' : (gobject.SIGNAL_RUN_LAST,
68 gobject.TYPE_NONE,
69 ()),
70 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
71 gobject.TYPE_NONE,
72 ()),
73 'build-failed' : (gobject.SIGNAL_RUN_LAST,
74 gobject.TYPE_NONE,
75 ()),
76 'build-complete' : (gobject.SIGNAL_RUN_LAST,
77 gobject.TYPE_NONE,
78 ()),
79 'build-aborted' : (gobject.SIGNAL_RUN_LAST,
80 gobject.TYPE_NONE,
81 ()),
82 'task-started' : (gobject.SIGNAL_RUN_LAST,
83 gobject.TYPE_NONE,
84 (gobject.TYPE_PYOBJECT,)),
85 'log-error' : (gobject.SIGNAL_RUN_LAST,
86 gobject.TYPE_NONE,
87 ()),
88 'log-warning' : (gobject.SIGNAL_RUN_LAST,
89 gobject.TYPE_NONE,
90 ()),
91 'disk-full' : (gobject.SIGNAL_RUN_LAST,
92 gobject.TYPE_NONE,
93 ()),
94 'no-provider' : (gobject.SIGNAL_RUN_LAST,
95 gobject.TYPE_NONE,
96 (gobject.TYPE_PYOBJECT,)),
97 'log' : (gobject.SIGNAL_RUN_LAST,
98 gobject.TYPE_NONE,
99 (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
100 }
101 pids_to_task = {}
102 tasks_to_iter = {}
103
104 def __init__ (self, sequential=False):
105 gobject.GObject.__init__ (self)
106 self.model = RunningBuildModel()
107 self.sequential = sequential
108 self.buildaborted = False
109
110 def reset (self):
111 self.pids_to_task.clear()
112 self.tasks_to_iter.clear()
113 self.model.clear()
114
115 def handle_event (self, event, pbar=None):
116        # Handle an event from the event queue; this may result in updating
117        # the model and thus the UI, or it may tell us that the build
118        # has finished successfully (or not, as the case may be).
119
120 parent = None
121 pid = 0
122 package = None
123 task = None
124
125 # If we have a pid attached to this message/event try and get the
126 # (package, task) pair for it. If we get that then get the parent iter
127 # for the message.
128 if hasattr(event, 'pid'):
129 pid = event.pid
130 if hasattr(event, 'process'):
131 pid = event.process
132
133 if pid and pid in self.pids_to_task:
134 (package, task) = self.pids_to_task[pid]
135 parent = self.tasks_to_iter[(package, task)]
136
137 if(isinstance(event, logging.LogRecord)):
138 if event.taskpid == 0 or event.levelno > logging.INFO:
139 self.emit("log", "handle", event)
140 # FIXME: this is a hack! More info in Yocto #1433
141 # http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
142 # mask the error message as it's not informative for the user.
143 if event.msg.startswith("Execution of event handler 'run_buildstats' failed"):
144 return
145
146 if (event.levelno < logging.INFO or
147 event.msg.startswith("Running task")):
148 return # don't add these to the list
149
150 if event.levelno >= logging.ERROR:
151 icon = "dialog-error"
152 color = HobColors.ERROR
153 self.emit("log-error")
154 elif event.levelno >= logging.WARNING:
155 icon = "dialog-warning"
156 color = HobColors.WARNING
157 self.emit("log-warning")
158 else:
159 icon = None
160 color = HobColors.OK
161
162 # if we know which package we belong to, we'll append onto its list.
163 # otherwise, we'll jump to the top of the master list
164 if self.sequential or not parent:
165 tree_add = self.model.append
166 else:
167 tree_add = self.model.prepend
168 tree_add(parent,
169 (None,
170 package,
171 task,
172 event.getMessage(),
173 icon,
174 color,
175 0))
176
177 # if there are warnings while processing a package
178 # (parent), mark the task with warning color;
179 # in case there are errors, the updates will be
180 # handled on TaskFailed.
181 if color == HobColors.WARNING and parent:
182 self.model.set(parent, self.model.COL_COLOR, color)
183                    if task: # then we have a parent (package); update its color
184 self.model.set(self.tasks_to_iter[(package, None)], self.model.COL_COLOR, color)
185
186 elif isinstance(event, bb.build.TaskStarted):
187 (package, task) = (event._package, event._task)
188
189 # Save out this PID.
190 self.pids_to_task[pid] = (package, task)
191
192 # Check if we already have this package in our model. If so then
193 # that can be the parent for the task. Otherwise we create a new
194 # top level for the package.
195 if ((package, None) in self.tasks_to_iter):
196 parent = self.tasks_to_iter[(package, None)]
197 else:
198 if self.sequential:
199 add = self.model.append
200 else:
201 add = self.model.prepend
202 parent = add(None, (None,
203 package,
204 None,
205 "Package: %s" % (package),
206 None,
207 HobColors.OK,
208 0))
209 self.tasks_to_iter[(package, None)] = parent
210
211            # Because this parent package now has an active child, mark it
212            # as such.
213 self.model.set(parent, self.model.COL_ICON, "gtk-execute")
214 parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
215 if parent_color != HobColors.ERROR and parent_color != HobColors.WARNING:
216 self.model.set(parent, self.model.COL_COLOR, HobColors.RUNNING)
217
218 # Add an entry in the model for this task
219 i = self.model.append (parent, (None,
220 package,
221 task,
222 "Task: %s" % (task),
223 "gtk-execute",
224 HobColors.RUNNING,
225 0))
226
227 # update the parent's active task count
228 num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
229 self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
230
231 # Save out the iter so that we can find it when we have a message
232 # that we need to attach to a task.
233 self.tasks_to_iter[(package, task)] = i
234
235 elif isinstance(event, bb.build.TaskBase):
236 self.emit("log", "info", event._message)
237 current = self.tasks_to_iter[(package, task)]
238 parent = self.tasks_to_iter[(package, None)]
239
240 # remove this task from the parent's active count
241 num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
242 self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
243
244 if isinstance(event, bb.build.TaskFailed):
245 # Mark the task and parent as failed
246 icon = "dialog-error"
247 color = HobColors.ERROR
248
249 logfile = event.logfile
250 if logfile and os.path.exists(logfile):
251 with open(logfile) as f:
252 logdata = f.read()
253 self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
254
255 for i in (current, parent):
256 self.model.set(i, self.model.COL_ICON, icon,
257 self.model.COL_COLOR, color)
258 else:
259 # Mark the parent package and the task as inactive,
260 # but make sure to preserve error, warnings and active
261 # states
262 parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
263 task_color = self.model.get(current, self.model.COL_COLOR)[0]
264
265 # Mark the task as inactive
266 self.model.set(current, self.model.COL_ICON, None)
267 if task_color != HobColors.ERROR:
268 if task_color == HobColors.WARNING:
269 self.model.set(current, self.model.COL_ICON, 'dialog-warning')
270 else:
271 self.model.set(current, self.model.COL_COLOR, HobColors.OK)
272
273 # Mark the parent as inactive
274 if parent_color != HobColors.ERROR:
275 if parent_color == HobColors.WARNING:
276 self.model.set(parent, self.model.COL_ICON, "dialog-warning")
277 else:
278 self.model.set(parent, self.model.COL_ICON, None)
279 if num_active == 0:
280 self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
281
282 # Clear the iters and the pids since when the task goes away the
283 # pid will no longer be used for messages
284 del self.tasks_to_iter[(package, task)]
285 del self.pids_to_task[pid]
286
287 elif isinstance(event, bb.event.BuildStarted):
288
289 self.emit("build-started")
290 self.model.prepend(None, (None,
291 None,
292 None,
293 "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
294 None,
295 HobColors.OK,
296 0))
297 if pbar:
298 pbar.update(0, self.progress_total)
299 pbar.set_title(bb.event.getName(event))
300
301 elif isinstance(event, bb.event.BuildCompleted):
302 failures = int (event._failures)
303 self.model.prepend(None, (None,
304 None,
305 None,
306 "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
307 None,
308 HobColors.OK,
309 0))
310
311 # Emit the appropriate signal depending on the number of failures
312 if self.buildaborted:
313 self.emit ("build-aborted")
314 self.buildaborted = False
315 elif (failures >= 1):
316 self.emit ("build-failed")
317 else:
318 self.emit ("build-succeeded")
319 # Emit a generic "build-complete" signal for things wishing to
320 # handle when the build is finished
321 self.emit("build-complete")
322            # reset all cells' icon indicators
323 self.model.close_task_refresh()
324 if pbar:
325 pbar.set_text(event.msg)
326
327 elif isinstance(event, bb.event.DiskFull):
328 self.buildaborted = True
329 self.emit("disk-full")
330
331 elif isinstance(event, bb.command.CommandFailed):
332 self.emit("log", "error", "Command execution failed: %s" % (event.error))
333 if event.error.startswith("Exited with"):
334 # If the command fails with an exit code we're done, emit the
335 # generic signal for the UI to notify the user
336 self.emit("build-complete")
337                # reset all cells' icon indicators
338 self.model.close_task_refresh()
339
340 elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
341 pbar.set_title("Loading cache")
342 self.progress_total = event.total
343 pbar.update(0, self.progress_total)
344 elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
345 pbar.update(event.current, self.progress_total)
346 elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
347 pbar.update(self.progress_total, self.progress_total)
348 pbar.hide()
349 elif isinstance(event, bb.event.ParseStarted) and pbar:
350 if event.total == 0:
351 return
352 pbar.set_title("Processing recipes")
353 self.progress_total = event.total
354 pbar.update(0, self.progress_total)
355 elif isinstance(event, bb.event.ParseProgress) and pbar:
356 pbar.update(event.current, self.progress_total)
357 elif isinstance(event, bb.event.ParseCompleted) and pbar:
358 pbar.hide()
359        # use runqueue events as much as possible to update the progress bar
360 elif isinstance(event, bb.runqueue.runQueueTaskFailed):
361 self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
362 elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
363 self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
364 % (event.taskid, event.taskstring, event.exitcode))
365 elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
366 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
367 self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
368 (event.stats.completed + event.stats.active + event.stats.failed + 1,
369 event.stats.total, event.taskstring))
370 else:
371 if event.noexec:
372 tasktype = 'noexec task'
373 else:
374 tasktype = 'task'
375 self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
376 (tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
377 event.stats.total, event.taskid, event.taskstring))
378 message = {}
379 message["eventname"] = bb.event.getName(event)
380 num_of_completed = event.stats.completed + event.stats.failed
381 message["current"] = num_of_completed
382 message["total"] = event.stats.total
383 message["title"] = ""
384 message["task"] = event.taskstring
385 self.emit("task-started", message)
386 elif isinstance(event, bb.event.MultipleProviders):
387 self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
388 % (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
389 self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
390 elif isinstance(event, bb.event.NoProvider):
391 msg = ""
392 if event._runtime:
393 r = "R"
394 else:
395 r = ""
396
397 extra = ''
398 if not event._reasons:
399 if event._close_matches:
400 extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
401
402 if event._dependees:
403 msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s\n" % (r, event._item, ", ".join(event._dependees), r, extra)
404 else:
405 msg = "Nothing %sPROVIDES '%s'%s\n" % (r, event._item, extra)
406 if event._reasons:
407 for reason in event._reasons:
408 msg += ("%s\n" % reason)
409 self.emit("no-provider", msg)
410 self.emit("log", "error", msg)
411 elif isinstance(event, bb.event.LogExecTTY):
412 icon = "dialog-warning"
413 color = HobColors.WARNING
414 if self.sequential or not parent:
415 tree_add = self.model.append
416 else:
417 tree_add = self.model.prepend
418 tree_add(parent,
419 (None,
420 package,
421 task,
422 event.msg,
423 icon,
424 color,
425 0))
426 else:
427 if not isinstance(event, (bb.event.BuildBase,
428 bb.event.StampUpdate,
429 bb.event.ConfigParsed,
430 bb.event.RecipeParsed,
431 bb.event.RecipePreFinalise,
432 bb.runqueue.runQueueEvent,
433 bb.runqueue.runQueueExitWait,
434 bb.event.OperationStarted,
435 bb.event.OperationCompleted,
436 bb.event.OperationProgress)):
437 self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))
438
439 return
440
441
442def do_pastebin(text):
443 url = 'http://pastebin.com/api_public.php'
444 params = {'paste_code': text, 'paste_format': 'text'}
445
446 req = urllib2.Request(url, urllib.urlencode(params))
447 response = urllib2.urlopen(req)
448 paste_url = response.read()
449
450 return paste_url
451
452
453class RunningBuildTreeView (gtk.TreeView):
454 __gsignals__ = {
455 "button_press_event" : "override"
456 }
457 def __init__ (self, readonly=False, hob=False):
458 gtk.TreeView.__init__ (self)
459 self.readonly = readonly
460
461 # The icon that indicates whether we're building or failed.
462        # the 'hob' flag exists because this code is shared by UIs other than Hob
463 if hob:
464 renderer = HobCellRendererPixbuf ()
465 else:
466 renderer = gtk.CellRendererPixbuf()
467 col = gtk.TreeViewColumn ("Status", renderer)
468 col.add_attribute (renderer, "icon-name", 4)
469 self.append_column (col)
470
471 # The message of the build.
472        # the 'hob' flag exists because this code is shared by UIs other than Hob
473 if hob:
474 self.message_renderer = HobWarpCellRendererText (col_number=1)
475 else:
476 self.message_renderer = gtk.CellRendererText ()
477 self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
478 self.message_column.add_attribute(self.message_renderer, 'background', 5)
479 self.message_renderer.set_property('editable', (not self.readonly))
480 self.append_column (self.message_column)
481
482 def do_button_press_event(self, event):
483 gtk.TreeView.do_button_press_event(self, event)
484
485 if event.button == 3:
486 selection = super(RunningBuildTreeView, self).get_selection()
487 (model, it) = selection.get_selected()
488 if it is not None:
489 can_paste = model.get(it, model.COL_LOG)[0]
490 if can_paste == 'pastebin':
491 # build a simple menu with a pastebin option
492 menu = gtk.Menu()
493 menuitem = gtk.MenuItem("Copy")
494 menu.append(menuitem)
495 menuitem.connect("activate", self.clipboard_handler, (model, it))
496 menuitem.show()
497 menuitem = gtk.MenuItem("Send log to pastebin")
498 menu.append(menuitem)
499 menuitem.connect("activate", self.pastebin_handler, (model, it))
500 menuitem.show()
501 menu.show()
502 menu.popup(None, None, None, event.button, event.time)
503
504 def _add_to_clipboard(self, clipping):
505 """
506 Add the contents of clipping to the system clipboard.
507 """
508 clipboard = gtk.clipboard_get()
509 clipboard.set_text(clipping)
510 clipboard.store()
511
512 def pastebin_handler(self, widget, data):
513 """
514 Send the log data to pastebin, then add the new paste url to the
515 clipboard.
516 """
517 (model, it) = data
518 paste_url = do_pastebin(model.get(it, model.COL_MESSAGE)[0])
519
520 # @todo Provide visual feedback to the user that it is done and that
521 # it worked.
522 print paste_url
523
524 self._add_to_clipboard(paste_url)
525
526 def clipboard_handler(self, widget, data):
527        """
528        Copy the message in the selected row to the system clipboard."""
529 (model, it) = data
530 message = model.get(it, model.COL_MESSAGE)[0]
531
532 self._add_to_clipboard(message)
533
534class BuildFailureTreeView(gtk.TreeView):
535
536 def __init__ (self):
537 gtk.TreeView.__init__(self)
538 self.set_rules_hint(False)
539 self.set_headers_visible(False)
540 self.get_selection().set_mode(gtk.SELECTION_SINGLE)
541
542 # The icon that indicates whether we're building or failed.
543 renderer = HobCellRendererPixbuf ()
544 col = gtk.TreeViewColumn ("Status", renderer)
545 col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
546 self.append_column (col)
547
548 # The message of the build.
549 self.message_renderer = HobWarpCellRendererText (col_number=1)
550 self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
551 self.append_column (self.message_column)
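
failure_model() above wraps the build model in a gtk.TreeModelFilter whose visible function keeps only rows coloured as errors or warnings; BuildFailureTreeView is then pointed at that filtered model. A self-contained sketch of the filtering idiom (the two-column layout is illustrative, not the real RunningBuildModel schema):

import gtk

# Column 0 is the message, column 1 a severity string used by the filter.
store = gtk.ListStore(str, str)
for msg, severity in [("compiling foo.c", "ok"),
                      ("implicit declaration of bar", "warning"),
                      ("undefined reference to baz", "error")]:
    store.append([msg, severity])

def failures_only(model, it):
    # Equivalent of failure_model_filter(): keep warnings and errors only.
    return model.get_value(it, 1) in ("warning", "error")

filtered = store.filter_new()
filtered.set_visible_func(failures_only)

view = gtk.TreeView(filtered)
view.append_column(gtk.TreeViewColumn("Message", gtk.CellRendererText(), text=0))
# 'view' now shows only the warning and error rows from 'store'.
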
diff --git a/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py b/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py
new file mode 100644
index 0000000000..76ce2ecc23
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/sanitycheckpage.py
@@ -0,0 +1,85 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2012 Intel Corporation
6#
7# Authored by Bogdan Marinescu <bogdan.a.marinescu@intel.com>
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import gtk, gobject
23from bb.ui.crumbs.progressbar import HobProgressBar
24from bb.ui.crumbs.hobwidget import hic
25from bb.ui.crumbs.hobpages import HobPage
26
27#
28# SanityCheckPage
29#
30class SanityCheckPage (HobPage):
31
32 def __init__(self, builder):
33 super(SanityCheckPage, self).__init__(builder)
34 self.running = False
35 self.create_visual_elements()
36 self.show_all()
37
38 def make_label(self, text, bold=True):
39 label = gtk.Label()
40 label.set_alignment(0.0, 0.5)
41 mark = "<span %s>%s</span>" % (self.span_tag('x-large', 'bold') if bold else self.span_tag('medium'), text)
42 label.set_markup(mark)
43 return label
44
45 def start(self):
46 if not self.running:
47 self.running = True
48 gobject.timeout_add(100, self.timer_func)
49
50 def stop(self):
51 self.running = False
52
53 def is_running(self):
54 return self.running
55
56 def timer_func(self):
57 self.progress_bar.pulse()
58 return self.running
59
60 def create_visual_elements(self):
61        # Table-based layout; 'rows' and 'cols' give the table size
62 rows, cols = 30, 50
63 self.table = gtk.Table(rows, cols, True)
64 self.pack_start(self.table, expand=False, fill=False)
65 sx, sy = 2, 2
66 # 'info' icon
67 image = gtk.Image()
68 image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
69 self.table.attach(image, sx, sx + 2, sy, sy + 3 )
70 image.show()
71 # 'Checking' message
72 label = self.make_label('Hob is checking for correct build system setup')
73 self.table.attach(label, sx + 2, cols, sy, sy + 3, xpadding=5 )
74 label.show()
75 # 'Shouldn't take long' message.
76 label = self.make_label("The check shouldn't take long.", False)
77 self.table.attach(label, sx + 2, cols, sy + 3, sy + 4, xpadding=5)
78 label.show()
79 # Progress bar
80 self.progress_bar = HobProgressBar()
81 self.table.attach(self.progress_bar, sx + 2, cols - 3, sy + 5, sy + 7, xpadding=5)
82 self.progress_bar.show()
83 # All done
84 self.table.show()
85
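
start() above relies on the GLib convention that a gobject.timeout_add() callback is rescheduled for as long as it returns True, so timer_func() returning self.running both keeps the progress bar pulsing and removes the timer once stop() is called. A stripped-down sketch of that idiom with a stock gtk.ProgressBar (HobProgressBar is Hob-specific, so it is not used here):

import gtk
import gobject

class PulsingBar(gtk.Window):
    def __init__(self):
        gtk.Window.__init__(self)
        self.running = False
        self.bar = gtk.ProgressBar()
        self.add(self.bar)
        self.connect("delete-event", gtk.main_quit)
        self.show_all()

    def start(self):
        if not self.running:
            self.running = True
            # Called every 100 ms until the callback returns False.
            gobject.timeout_add(100, self._tick)

    def stop(self):
        self.running = False

    def _tick(self):
        self.bar.pulse()
        return self.running  # returning False removes the timeout

if __name__ == '__main__':
    win = PulsingBar()
    win.start()
    gobject.timeout_add(5000, win.stop)  # stop pulsing after five seconds
    gtk.main()
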
diff --git a/bitbake/lib/bb/ui/crumbs/utils.py b/bitbake/lib/bb/ui/crumbs/utils.py
new file mode 100644
index 0000000000..939864fa6f
--- /dev/null
+++ b/bitbake/lib/bb/ui/crumbs/utils.py
@@ -0,0 +1,34 @@
1#
2# BitBake UI Utils
3#
4# Copyright (C) 2012 Intel Corporation
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19# This utility method looks for xterm or vte and returns the
20# first one that exists. Currently we are keeping this simple, but
21# we will likely move the oe.terminal implementation into
22# bitbake, which will allow more flexibility.
23
24import os
25import bb
26
27def which_terminal():
28 term = bb.utils.which(os.environ["PATH"], "xterm")
29 if term:
30 return term + " -e "
31 term = bb.utils.which(os.environ["PATH"], "vte")
32 if term:
33 return term + " -c "
34 return None
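
which_terminal() above only returns a command prefix ("xterm -e " or "vte -c ") or None, leaving the caller to append the command to run. A hypothetical usage sketch, assuming bitbake's lib directory is on sys.path and using an example command:

import subprocess
from bb.ui.crumbs.utils import which_terminal

term = which_terminal()
if term:
    # e.g. runs "xterm -e bitbake -c devshell busybox"
    subprocess.call(term + "bitbake -c devshell busybox", shell=True)
else:
    print("No supported terminal (xterm or vte) found in PATH")
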
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
new file mode 100644
index 0000000000..5d13b5b79e
--- /dev/null
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -0,0 +1,326 @@
1#
2# BitBake Graphical GTK based Dependency Explorer
3#
4# Copyright (C) 2007 Ross Burton
5# Copyright (C) 2007 - 2008 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import gobject
21import gtk
22import Queue
23import threading
24import xmlrpclib
25import bb
26import bb.event
27from bb.ui.crumbs.progressbar import HobProgressBar
28
29# Package Model
30(COL_PKG_NAME) = (0)
31
32# Dependency Model
33(TYPE_DEP, TYPE_RDEP) = (0, 1)
34(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
35
36
37class PackageDepView(gtk.TreeView):
38 def __init__(self, model, dep_type, label):
39 gtk.TreeView.__init__(self)
40 self.current = None
41 self.dep_type = dep_type
42 self.filter_model = model.filter_new()
43 self.filter_model.set_visible_func(self._filter)
44 self.set_model(self.filter_model)
45 #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
46 self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
47
48 def _filter(self, model, iter):
49 (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
50 if this_type != self.dep_type: return False
51 return package == self.current
52
53 def set_current_package(self, package):
54 self.current = package
55 self.filter_model.refilter()
56
57
58class PackageReverseDepView(gtk.TreeView):
59 def __init__(self, model, label):
60 gtk.TreeView.__init__(self)
61 self.current = None
62 self.filter_model = model.filter_new()
63 self.filter_model.set_visible_func(self._filter)
64 self.set_model(self.filter_model)
65 self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
66
67 def _filter(self, model, iter):
68 package = model.get_value(iter, COL_DEP_PACKAGE)
69 return package == self.current
70
71 def set_current_package(self, package):
72 self.current = package
73 self.filter_model.refilter()
74
75
76class DepExplorer(gtk.Window):
77 def __init__(self):
78 gtk.Window.__init__(self)
79 self.set_title("Dependency Explorer")
80 self.set_default_size(500, 500)
81 self.connect("delete-event", gtk.main_quit)
82
83 # Create the data models
84 self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
85 self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
86 self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
87 self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
88
89 pane = gtk.HPaned()
90 pane.set_position(250)
91 self.add(pane)
92
93 # The master list of packages
94 scrolled = gtk.ScrolledWindow()
95 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
96 scrolled.set_shadow_type(gtk.SHADOW_IN)
97
98 self.pkg_treeview = gtk.TreeView(self.pkg_model)
99 self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
100 column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
101 self.pkg_treeview.append_column(column)
102 pane.add1(scrolled)
103 scrolled.add(self.pkg_treeview)
104
105 box = gtk.VBox(homogeneous=True, spacing=4)
106
107 # Runtime Depends
108 scrolled = gtk.ScrolledWindow()
109 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
110 scrolled.set_shadow_type(gtk.SHADOW_IN)
111 self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
112 self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
113 scrolled.add(self.rdep_treeview)
114 box.add(scrolled)
115
116 # Build Depends
117 scrolled = gtk.ScrolledWindow()
118 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
119 scrolled.set_shadow_type(gtk.SHADOW_IN)
120 self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
121 self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
122 scrolled.add(self.dep_treeview)
123 box.add(scrolled)
124 pane.add2(box)
125
126 # Reverse Depends
127 scrolled = gtk.ScrolledWindow()
128 scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
129 scrolled.set_shadow_type(gtk.SHADOW_IN)
130 self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
131 self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
132 scrolled.add(self.revdep_treeview)
133 box.add(scrolled)
134 pane.add2(box)
135
136 self.show_all()
137
138 def on_package_activated(self, treeview, path, column, data_col):
139 model = treeview.get_model()
140 package = model.get_value(model.get_iter(path), data_col)
141
142 pkg_path = []
143 def finder(model, path, iter, needle):
144 package = model.get_value(iter, COL_PKG_NAME)
145 if package == needle:
146 pkg_path.append(path)
147 return True
148 else:
149 return False
150 self.pkg_model.foreach(finder, package)
151 if pkg_path:
152 self.pkg_treeview.get_selection().select_path(pkg_path[0])
153 self.pkg_treeview.scroll_to_cell(pkg_path[0])
154
155 def on_cursor_changed(self, selection):
156 (model, it) = selection.get_selected()
157 if it is None:
158 current_package = None
159 else:
160 current_package = model.get_value(it, COL_PKG_NAME)
161 self.rdep_treeview.set_current_package(current_package)
162 self.dep_treeview.set_current_package(current_package)
163 self.revdep_treeview.set_current_package(current_package)
164
165
166 def parse(self, depgraph):
167 for package in depgraph["pn"]:
168 self.pkg_model.insert(0, (package,))
169
170 for package in depgraph["depends"]:
171 for depend in depgraph["depends"][package]:
172 self.depends_model.insert (0, (TYPE_DEP, package, depend))
173
174 for package in depgraph["rdepends-pn"]:
175 for rdepend in depgraph["rdepends-pn"][package]:
176 self.depends_model.insert (0, (TYPE_RDEP, package, rdepend))
177
178
179class gtkthread(threading.Thread):
180 quit = threading.Event()
181 def __init__(self, shutdown):
182 threading.Thread.__init__(self)
183 self.setDaemon(True)
184 self.shutdown = shutdown
185
186 def run(self):
187 gobject.threads_init()
188 gtk.gdk.threads_init()
189 gtk.main()
190 gtkthread.quit.set()
191
192
193def main(server, eventHandler, params):
194 try:
195 params.updateFromServer(server)
196 cmdline = params.parseActions()
197 if not cmdline:
198 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
199 return 1
200 if 'msg' in cmdline and cmdline['msg']:
201 print(cmdline['msg'])
202 return 1
203 cmdline = cmdline['action']
204 if not cmdline or cmdline[0] != "generateDotGraph":
205 print("This UI requires the -g option")
206 return 1
207 ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
208 if error:
209 print("Error running command '%s': %s" % (cmdline, error))
210 return 1
211 elif ret != True:
212 print("Error running command '%s': returned %s" % (cmdline, ret))
213 return 1
214 except xmlrpclib.Fault as x:
215 print("XMLRPC Fault getting commandline:\n %s" % x)
216 return
217
218 shutdown = 0
219
220 gtkgui = gtkthread(shutdown)
221 gtkgui.start()
222
223 gtk.gdk.threads_enter()
224 dep = DepExplorer()
225 bardialog = gtk.Dialog(parent=dep,
226 flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
227 bardialog.set_default_size(400, 50)
228 pbar = HobProgressBar()
229 bardialog.vbox.pack_start(pbar)
230 bardialog.show_all()
231 bardialog.connect("delete-event", gtk.main_quit)
232 gtk.gdk.threads_leave()
233
234 progress_total = 0
235 while True:
236 try:
237 event = eventHandler.waitEvent(0.25)
238 if gtkthread.quit.isSet():
239 _, error = server.runCommand(["stateStop"])
240 if error:
241 print('Unable to cleanly stop: %s' % error)
242 break
243
244 if event is None:
245 continue
246
247 if isinstance(event, bb.event.CacheLoadStarted):
248 progress_total = event.total
249 gtk.gdk.threads_enter()
250 bardialog.set_title("Loading Cache")
251 pbar.update(0)
252 gtk.gdk.threads_leave()
253
254 if isinstance(event, bb.event.CacheLoadProgress):
255 x = event.current
256 gtk.gdk.threads_enter()
257 pbar.update(x * 1.0 / progress_total)
258 pbar.set_title('')
259 gtk.gdk.threads_leave()
260 continue
261
262 if isinstance(event, bb.event.CacheLoadCompleted):
263 bardialog.hide()
264 continue
265
266 if isinstance(event, bb.event.ParseStarted):
267 progress_total = event.total
268 if progress_total == 0:
269 continue
270 gtk.gdk.threads_enter()
271 pbar.update(0)
272 bardialog.set_title("Processing recipes")
273
274 gtk.gdk.threads_leave()
275
276 if isinstance(event, bb.event.ParseProgress):
277 x = event.current
278 gtk.gdk.threads_enter()
279 pbar.update(x * 1.0 / progress_total)
280 pbar.set_title('')
281 gtk.gdk.threads_leave()
282 continue
283
284 if isinstance(event, bb.event.ParseCompleted):
285 bardialog.hide()
286 continue
287
288 if isinstance(event, bb.event.DepTreeGenerated):
289 gtk.gdk.threads_enter()
290 dep.parse(event._depgraph)
291 gtk.gdk.threads_leave()
292
293 if isinstance(event, bb.command.CommandCompleted):
294 continue
295
296 if isinstance(event, bb.command.CommandFailed):
297 print("Command execution failed: %s" % event.error)
298 return event.exitcode
299
300 if isinstance(event, bb.command.CommandExit):
301 return event.exitcode
302
303 if isinstance(event, bb.cooker.CookerExit):
304 break
305
306 continue
307 except EnvironmentError as ioerror:
308 # ignore interrupted io
309 if ioerror.args[0] == 4:
310 pass
311 except KeyboardInterrupt:
312 if shutdown == 2:
313 print("\nThird Keyboard Interrupt, exit.\n")
314 break
315 if shutdown == 1:
316 print("\nSecond Keyboard Interrupt, stopping...\n")
317 _, error = server.runCommand(["stateForceShutdown"])
318 if error:
319 print('Unable to cleanly stop: %s' % error)
320 if shutdown == 0:
321 print("\nKeyboard Interrupt, closing down...\n")
322 _, error = server.runCommand(["stateShutdown"])
323 if error:
324 print('Unable to cleanly shutdown: %s' % error)
325 shutdown = shutdown + 1
326 pass
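
DepExplorer.parse() above consumes the depgraph dictionary carried by bb.event.DepTreeGenerated; only its 'pn', 'depends' and 'rdepends-pn' keys are read here. A hand-written sketch of that shape being fed to the explorer, assuming bitbake's lib directory is on sys.path (package names and dependencies are illustrative):

import gtk
from bb.ui.depexp import DepExplorer  # the class defined above

depgraph = {
    "pn": {"busybox": {}, "ncurses": {}, "virtual/libc": {}},
    "depends": {"busybox": ["virtual/libc", "ncurses"]},  # build-time dependencies
    "rdepends-pn": {"busybox": ["busybox-syslog"]},       # runtime dependencies
}

explorer = DepExplorer()
explorer.parse(depgraph)
gtk.main()
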
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
new file mode 100644
index 0000000000..f4ee7b41ae
--- /dev/null
+++ b/bitbake/lib/bb/ui/goggle.py
@@ -0,0 +1,121 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gobject
22import gtk
23import xmlrpclib
24from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
25from bb.ui.crumbs.progress import ProgressBar
26
27import Queue
28
29
30def event_handle_idle_func (eventHandler, build, pbar):
31
32 # Consume as many messages as we can in the time available to us
33 event = eventHandler.getEvent()
34 while event:
35 build.handle_event (event, pbar)
36 event = eventHandler.getEvent()
37
38 return True
39
40def scroll_tv_cb (model, path, iter, view):
41 view.scroll_to_cell (path)
42
43
44# @todo hook these into the GUI so the user has feedback...
45def running_build_failed_cb (running_build):
46 pass
47
48
49def running_build_succeeded_cb (running_build):
50 pass
51
52
53class MainWindow (gtk.Window):
54 def __init__ (self):
55 gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
56
57 # Setup tree view and the scrolled window
58 scrolled_window = gtk.ScrolledWindow ()
59 self.add (scrolled_window)
60 self.cur_build_tv = RunningBuildTreeView()
61 self.connect("delete-event", gtk.main_quit)
62 self.set_default_size(640, 480)
63 scrolled_window.add (self.cur_build_tv)
64
65
66def main (server, eventHandler, params):
67 gobject.threads_init()
68 gtk.gdk.threads_init()
69
70 window = MainWindow ()
71 window.show_all ()
72 pbar = ProgressBar(window)
73 pbar.connect("delete-event", gtk.main_quit)
74
75 # Create the object for the current build
76 running_build = RunningBuild ()
77 window.cur_build_tv.set_model (running_build.model)
78 running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
79 running_build.connect ("build-succeeded", running_build_succeeded_cb)
80 running_build.connect ("build-failed", running_build_failed_cb)
81
82 try:
83 params.updateFromServer(server)
84 cmdline = params.parseActions()
85 if not cmdline:
86 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
87 return 1
88 if 'msg' in cmdline and cmdline['msg']:
89            print(cmdline['msg'])
90 return 1
91 cmdline = cmdline['action']
92 ret, error = server.runCommand(cmdline)
93 if error:
94 print("Error running command '%s': %s" % (cmdline, error))
95 return 1
96 elif ret != True:
97 print("Error running command '%s': returned %s" % (cmdline, ret))
98 return 1
99 except xmlrpclib.Fault as x:
100 print("XMLRPC Fault getting commandline:\n %s" % x)
101 return 1
102
103 # Use a timeout function for probing the event queue to find out if we
104 # have a message waiting for us.
105 gobject.timeout_add (100,
106 event_handle_idle_func,
107 eventHandler,
108 running_build,
109 pbar)
110
111 try:
112 gtk.main()
113 except EnvironmentError as ioerror:
114 # ignore interrupted io
115 if ioerror.args[0] == 4:
116 pass
117 except KeyboardInterrupt:
118 pass
119 finally:
120 server.runCommand(["stateForceShutdown"])
121
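goggle.py drains the UI event queue from a recurring GLib timeout rather than blocking on it: event_handle_idle_func() keeps calling eventHandler.getEvent() until the queue is empty and returns True so the timeout stays registered. A self-contained sketch of that polling pattern, using a plain Queue in place of the BitBake event handler:

    try:
        import Queue as queue   # Python 2, matching the import in goggle.py
    except ImportError:
        import queue             # Python 3

    def pump_events(event_queue, handle):
        # Consume every event currently waiting, then return True so a
        # gobject.timeout_add caller keeps the timer alive.
        while True:
            try:
                event = event_queue.get_nowait()
            except queue.Empty:
                return True
            handle(event)

    def show(event):
        print("handled %s" % event)

    q = queue.Queue()
    q.put("ParseStarted")
    q.put("ParseCompleted")
    pump_events(q, show)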
diff --git a/bitbake/lib/bb/ui/hob.py b/bitbake/lib/bb/ui/hob.py
new file mode 100755
index 0000000000..da5b411891
--- /dev/null
+++ b/bitbake/lib/bb/ui/hob.py
@@ -0,0 +1,109 @@
1#!/usr/bin/env python
2#
3# BitBake Graphical GTK User Interface
4#
5# Copyright (C) 2011 Intel Corporation
6#
7# Authored by Joshua Lock <josh@linux.intel.com>
8# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import sys
24import os
25requirements = "FATAL: Hob requires Gtk+ 2.20.0 or higher, PyGtk 2.21.0 or higher"
26try:
27 import gobject
28 import gtk
29 import pygtk
30 pygtk.require('2.0') # to be certain we don't have gtk+ 1.x !?!
31 gtkver = gtk.gtk_version
32 pygtkver = gtk.pygtk_version
33 if gtkver < (2, 20, 0) or pygtkver < (2, 21, 0):
34 sys.exit("%s,\nYou have Gtk+ %s and PyGtk %s." % (requirements,
35 ".".join(map(str, gtkver)),
36 ".".join(map(str, pygtkver))))
37except ImportError as exc:
38 sys.exit("%s (%s)." % (requirements, str(exc)))
39sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
40try:
41 import bb
42except RuntimeError as exc:
43 sys.exit(str(exc))
44from bb.ui import uihelper
45from bb.ui.crumbs.hoblistmodel import RecipeListModel, PackageListModel
46from bb.ui.crumbs.hobeventhandler import HobHandler
47from bb.ui.crumbs.builder import Builder
48
49featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
50
51def event_handle_idle_func(eventHandler, hobHandler):
52 # Consume as many messages as we can in the time available to us
53 if not eventHandler:
54 return False
55 event = eventHandler.getEvent()
56 while event:
57 hobHandler.handle_event(event)
58 event = eventHandler.getEvent()
59 return True
60
61_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
62 "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
63 "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
64 "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
65 "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
66 "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
67 "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
68 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent",
69 "bb.event.SanityCheckPassed", "bb.event.SanityCheckFailed", "bb.event.PackageInfo",
70 "bb.event.TargetsTreeGenerated", "bb.event.ConfigFilesFound", "bb.event.ConfigFilePathFound",
71 "bb.event.FilesMatchingFound", "bb.event.NetworkTestFailed", "bb.event.NetworkTestPassed",
72 "bb.event.BuildStarted", "bb.event.BuildCompleted", "bb.event.DiskFull"]
73
74def main (server, eventHandler, params):
75 params.updateFromServer(server)
76 gobject.threads_init()
77
78    # Create the recipe and package list models that back the Hob UI;
79    # the HobHandler populates them as events arrive from the bitbake
80    # server.
81 recipe_model = RecipeListModel()
82 package_model = PackageListModel()
83
84 llevel, debug_domains = bb.msg.constructLogOptions()
85 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
86 hobHandler = HobHandler(server, recipe_model, package_model)
87 builder = Builder(hobHandler, recipe_model, package_model)
88
89 # This timeout function regularly probes the event queue to find out if we
90 # have any messages waiting for us.
91 gobject.timeout_add(10, event_handle_idle_func, eventHandler, hobHandler)
92
93 try:
94 gtk.main()
95 except EnvironmentError as ioerror:
96 # ignore interrupted io
97 if ioerror.args[0] == 4:
98 pass
99 finally:
100 hobHandler.cancel_build(force = True)
101
102if __name__ == "__main__":
103 try:
104 ret = main()
105 except Exception:
106 ret = 1
107 import traceback
108 traceback.print_exc(15)
109 sys.exit(ret)
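hob.py registers _evt_list with setEventMask so the server only forwards the event classes this UI handles. The actual filtering happens inside the BitBake server; the sketch below only illustrates the idea of matching an event's fully qualified class name against such a list (the stand-in event class is hypothetical):

    def event_name(event):
        # e.g. "bb.event.ParseCompleted" for a real BitBake event instance
        cls = event.__class__
        return "%s.%s" % (cls.__module__, cls.__name__)

    def passes_mask(event, mask):
        return event_name(event) in mask

    # Stand-in class pretending to live in bb.event, for demonstration only.
    class ParseCompleted(object):
        pass
    ParseCompleted.__module__ = "bb.event"

    print(passes_mask(ParseCompleted(), ["bb.event.ParseCompleted"]))  # True
    print(passes_mask(ParseCompleted(), ["bb.build.TaskFailed"]))      # False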
diff --git a/bitbake/lib/bb/ui/icons/images/images_display.png b/bitbake/lib/bb/ui/icons/images/images_display.png
new file mode 100644
index 0000000000..a7f87101af
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/images/images_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/images/images_hover.png b/bitbake/lib/bb/ui/icons/images/images_hover.png
new file mode 100644
index 0000000000..2d9cd99b8e
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/images/images_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/add-hover.png b/bitbake/lib/bb/ui/icons/indicators/add-hover.png
new file mode 100644
index 0000000000..526df770d1
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/add-hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/add.png b/bitbake/lib/bb/ui/icons/indicators/add.png
new file mode 100644
index 0000000000..31e7090d61
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/add.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/alert.png b/bitbake/lib/bb/ui/icons/indicators/alert.png
new file mode 100644
index 0000000000..d1c6f55a2f
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/alert.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/confirmation.png b/bitbake/lib/bb/ui/icons/indicators/confirmation.png
new file mode 100644
index 0000000000..3a5402d1e3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/confirmation.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/denied.png b/bitbake/lib/bb/ui/icons/indicators/denied.png
new file mode 100644
index 0000000000..ee35c7defa
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/denied.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/error.png b/bitbake/lib/bb/ui/icons/indicators/error.png
new file mode 100644
index 0000000000..d06a8c151a
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/error.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/info.png b/bitbake/lib/bb/ui/icons/indicators/info.png
new file mode 100644
index 0000000000..ee8e8d8462
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/info.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/issues.png b/bitbake/lib/bb/ui/icons/indicators/issues.png
new file mode 100644
index 0000000000..b0c7461334
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/issues.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/refresh.png b/bitbake/lib/bb/ui/icons/indicators/refresh.png
new file mode 100644
index 0000000000..eb6c419db8
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/refresh.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/remove-hover.png b/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
new file mode 100644
index 0000000000..aa57c69982
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/remove-hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/remove.png b/bitbake/lib/bb/ui/icons/indicators/remove.png
new file mode 100644
index 0000000000..05c3c293d4
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/remove.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/indicators/tick.png b/bitbake/lib/bb/ui/icons/indicators/tick.png
new file mode 100644
index 0000000000..beaad361c3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/indicators/tick.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/info/info_display.png b/bitbake/lib/bb/ui/icons/info/info_display.png
new file mode 100644
index 0000000000..5afbba29f5
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/info/info_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/info/info_hover.png b/bitbake/lib/bb/ui/icons/info/info_hover.png
new file mode 100644
index 0000000000..f9d294dfae
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/info/info_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/layers/layers_display.png b/bitbake/lib/bb/ui/icons/layers/layers_display.png
new file mode 100644
index 0000000000..b7f9053a9e
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/layers/layers_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/layers/layers_hover.png b/bitbake/lib/bb/ui/icons/layers/layers_hover.png
new file mode 100644
index 0000000000..0bf3ce0dbc
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/layers/layers_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/packages/packages_display.png b/bitbake/lib/bb/ui/icons/packages/packages_display.png
new file mode 100644
index 0000000000..f5d0a5064d
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/packages/packages_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/packages/packages_hover.png b/bitbake/lib/bb/ui/icons/packages/packages_hover.png
new file mode 100644
index 0000000000..c081165f34
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/packages/packages_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/recipe/recipe_display.png b/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
new file mode 100644
index 0000000000..e9809bc7d9
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/recipe/recipe_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png b/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
new file mode 100644
index 0000000000..7e48da9af0
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/recipe/recipe_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/settings/settings_display.png b/bitbake/lib/bb/ui/icons/settings/settings_display.png
new file mode 100644
index 0000000000..88c464db04
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/settings/settings_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/settings/settings_hover.png b/bitbake/lib/bb/ui/icons/settings/settings_hover.png
new file mode 100644
index 0000000000..d92a0bf2c3
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/settings/settings_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/templates/templates_display.png b/bitbake/lib/bb/ui/icons/templates/templates_display.png
new file mode 100644
index 0000000000..153c7afb62
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/templates/templates_display.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/icons/templates/templates_hover.png b/bitbake/lib/bb/ui/icons/templates/templates_hover.png
new file mode 100644
index 0000000000..afb7165fe5
--- /dev/null
+++ b/bitbake/lib/bb/ui/icons/templates/templates_hover.png
Binary files differ
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
new file mode 100644
index 0000000000..9e58b31727
--- /dev/null
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -0,0 +1,559 @@
1#
2# BitBake (No)TTY UI Implementation
3#
4# Handling output to TTYs or files (no TTY)
5#
6# Copyright (C) 2006-2012 Richard Purdie
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21from __future__ import division
22
23import os
24import sys
25import xmlrpclib
26import logging
27import progressbar
28import signal
29import bb.msg
30import time
31import fcntl
32import struct
33import copy
34import atexit
35from bb.ui import uihelper
36
37featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
38
39logger = logging.getLogger("BitBake")
40interactive = sys.stdout.isatty()
41
42class BBProgress(progressbar.ProgressBar):
43 def __init__(self, msg, maxval):
44 self.msg = msg
45 widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
46 progressbar.ETA()]
47
48 try:
49 self._resize_default = signal.getsignal(signal.SIGWINCH)
50 except:
51 self._resize_default = None
52 progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets, fd=sys.stdout)
53
54 def _handle_resize(self, signum, frame):
55 progressbar.ProgressBar._handle_resize(self, signum, frame)
56 if self._resize_default:
57 self._resize_default(signum, frame)
58 def finish(self):
59 progressbar.ProgressBar.finish(self)
60 if self._resize_default:
61 signal.signal(signal.SIGWINCH, self._resize_default)
62
63class NonInteractiveProgress(object):
64 fobj = sys.stdout
65
66 def __init__(self, msg, maxval):
67 self.msg = msg
68 self.maxval = maxval
69
70 def start(self):
71 self.fobj.write("%s..." % self.msg)
72 self.fobj.flush()
73 return self
74
75 def update(self, value):
76 pass
77
78 def finish(self):
79 self.fobj.write("done.\n")
80 self.fobj.flush()
81
82def new_progress(msg, maxval):
83 if interactive:
84 return BBProgress(msg, maxval)
85 else:
86 return NonInteractiveProgress(msg, maxval)
87
88def pluralise(singular, plural, qty):
89 if(qty == 1):
90 return singular % qty
91 else:
92 return plural % qty
93
94
95class InteractConsoleLogFilter(logging.Filter):
96 def __init__(self, tf, format):
97 self.tf = tf
98 self.format = format
99
100 def filter(self, record):
101 if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
102 return False
103 self.tf.clearFooter()
104 return True
105
106class TerminalFilter(object):
107 columns = 80
108
109 def sigwinch_handle(self, signum, frame):
110 self.columns = self.getTerminalColumns()
111 if self._sigwinch_default:
112 self._sigwinch_default(signum, frame)
113
114 def getTerminalColumns(self):
115 def ioctl_GWINSZ(fd):
116 try:
117 cr = struct.unpack('hh', fcntl.ioctl(fd, self.termios.TIOCGWINSZ, '1234'))
118 except:
119 return None
120 return cr
121 cr = ioctl_GWINSZ(sys.stdout.fileno())
122 if not cr:
123 try:
124 fd = os.open(os.ctermid(), os.O_RDONLY)
125 cr = ioctl_GWINSZ(fd)
126 os.close(fd)
127 except:
128 pass
129 if not cr:
130 try:
131                cr = (os.environ['LINES'], os.environ['COLUMNS'])
132 except:
133 cr = (25, 80)
134 return cr[1]
135
136 def __init__(self, main, helper, console, errconsole, format):
137 self.main = main
138 self.helper = helper
139 self.cuu = None
140 self.stdinbackup = None
141 self.interactive = sys.stdout.isatty()
142 self.footer_present = False
143 self.lastpids = []
144
145 if not self.interactive:
146 return
147
148 try:
149 import curses
150 except ImportError:
151 sys.exit("FATAL: The knotty ui could not load the required curses python module.")
152
153 import termios
154 self.curses = curses
155 self.termios = termios
156 try:
157 fd = sys.stdin.fileno()
158 self.stdinbackup = termios.tcgetattr(fd)
159 new = copy.deepcopy(self.stdinbackup)
160 new[3] = new[3] & ~termios.ECHO
161 termios.tcsetattr(fd, termios.TCSADRAIN, new)
162 curses.setupterm()
163 if curses.tigetnum("colors") > 2:
164 format.enable_color()
165 self.ed = curses.tigetstr("ed")
166 if self.ed:
167 self.cuu = curses.tigetstr("cuu")
168 try:
169 self._sigwinch_default = signal.getsignal(signal.SIGWINCH)
170 signal.signal(signal.SIGWINCH, self.sigwinch_handle)
171 except:
172 pass
173 self.columns = self.getTerminalColumns()
174 except:
175 self.cuu = None
176 console.addFilter(InteractConsoleLogFilter(self, format))
177 errconsole.addFilter(InteractConsoleLogFilter(self, format))
178
179 def clearFooter(self):
180 if self.footer_present:
181 lines = self.footer_present
182 sys.stdout.write(self.curses.tparm(self.cuu, lines))
183 sys.stdout.write(self.curses.tparm(self.ed))
184 self.footer_present = False
185
186 def updateFooter(self):
187 if not self.cuu:
188 return
189 activetasks = self.helper.running_tasks
190 failedtasks = self.helper.failed_tasks
191 runningpids = self.helper.running_pids
192 if self.footer_present and (self.lastcount == self.helper.tasknumber_current) and (self.lastpids == runningpids):
193 return
194 if self.footer_present:
195 self.clearFooter()
196 if (not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total) and not len(activetasks):
197 return
198 tasks = []
199 for t in runningpids:
200 tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))
201
202 if self.main.shutdown:
203 content = "Waiting for %s running tasks to finish:" % len(activetasks)
204 elif not len(activetasks):
205 content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
206 else:
207 content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
208 print(content)
209 lines = 1 + int(len(content) / (self.columns + 1))
210 for tasknum, task in enumerate(tasks):
211 content = "%s: %s" % (tasknum, task)
212 print(content)
213 lines = lines + 1 + int(len(content) / (self.columns + 1))
214 self.footer_present = lines
215 self.lastpids = runningpids[:]
216 self.lastcount = self.helper.tasknumber_current
217
218 def finish(self):
219 if self.stdinbackup:
220 fd = sys.stdin.fileno()
221 self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
222
223def _log_settings_from_server(server):
224 # Get values of variables which control our output
225 includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
226 if error:
227 logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
228 raise BaseException(error)
229 loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
230 if error:
231 logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
232 raise BaseException(error)
233 consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
234 if error:
235 logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
236 raise BaseException(error)
237 return includelogs, loglines, consolelogfile
238
239_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
240 "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
241 "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
242 "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
243 "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
244 "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
245 "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
246 "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent"]
247
248def main(server, eventHandler, params, tf = TerminalFilter):
249
250 includelogs, loglines, consolelogfile = _log_settings_from_server(server)
251
252 if sys.stdin.isatty() and sys.stdout.isatty():
253 log_exec_tty = True
254 else:
255 log_exec_tty = False
256
257 helper = uihelper.BBUIHelper()
258
259 console = logging.StreamHandler(sys.stdout)
260 errconsole = logging.StreamHandler(sys.stderr)
261 format_str = "%(levelname)s: %(message)s"
262 format = bb.msg.BBLogFormatter(format_str)
263 bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut)
264 bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr)
265 console.setFormatter(format)
266 errconsole.setFormatter(format)
267 logger.addHandler(console)
268 logger.addHandler(errconsole)
269
270 if params.options.remote_server and params.options.kill_server:
271 server.terminateServer()
272 return
273
274 if consolelogfile and not params.options.show_environment:
275 bb.utils.mkdirhier(os.path.dirname(consolelogfile))
276 conlogformat = bb.msg.BBLogFormatter(format_str)
277 consolelog = logging.FileHandler(consolelogfile)
278 bb.msg.addDefaultlogFilter(consolelog)
279 consolelog.setFormatter(conlogformat)
280 logger.addHandler(consolelog)
281
282 llevel, debug_domains = bb.msg.constructLogOptions()
283 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
284
285 if not params.observe_only:
286 params.updateFromServer(server)
287 params.updateToServer(server)
288 cmdline = params.parseActions()
289 if not cmdline:
290 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
291 return 1
292 if 'msg' in cmdline and cmdline['msg']:
293 logger.error(cmdline['msg'])
294 return 1
295
296 ret, error = server.runCommand(cmdline['action'])
297 if error:
298 logger.error("Command '%s' failed: %s" % (cmdline, error))
299 return 1
300 elif ret != True:
301 logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
302 return 1
303
304
305 parseprogress = None
306 cacheprogress = None
307 main.shutdown = 0
308 interrupted = False
309 return_value = 0
310 errors = 0
311 warnings = 0
312 taskfailures = []
313
314 termfilter = tf(main, helper, console, errconsole, format)
315 atexit.register(termfilter.finish)
316
317 while True:
318 try:
319 event = eventHandler.waitEvent(0)
320 if event is None:
321 if main.shutdown > 1:
322 break
323 termfilter.updateFooter()
324 event = eventHandler.waitEvent(0.25)
325 if event is None:
326 continue
327 helper.eventHandler(event)
328 if isinstance(event, bb.runqueue.runQueueExitWait):
329 if not main.shutdown:
330 main.shutdown = 1
331 continue
332 if isinstance(event, bb.event.LogExecTTY):
333 if log_exec_tty:
334 tries = event.retries
335 while tries:
336 print("Trying to run: %s" % event.prog)
337 if os.system(event.prog) == 0:
338 break
339 time.sleep(event.sleep_delay)
340 tries -= 1
341 if tries:
342 continue
343 logger.warn(event.msg)
344 continue
345
346 if isinstance(event, logging.LogRecord):
347 if event.levelno >= format.ERROR:
348 errors = errors + 1
349 return_value = 1
350 elif event.levelno == format.WARNING:
351 warnings = warnings + 1
352 # For "normal" logging conditions, don't show note logs from tasks
353 # but do show them if the user has changed the default log level to
354 # include verbose/debug messages
355 if event.taskpid != 0 and event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
356 continue
357 logger.handle(event)
358 continue
359
360 if isinstance(event, bb.build.TaskFailedSilent):
361 logger.warn("Logfile for failed setscene task is %s" % event.logfile)
362 continue
363 if isinstance(event, bb.build.TaskFailed):
364 return_value = 1
365 logfile = event.logfile
366 if logfile and os.path.exists(logfile):
367 termfilter.clearFooter()
368 bb.error("Logfile of failure stored in: %s" % logfile)
369 if includelogs and not event.errprinted:
370 print("Log data follows:")
371 f = open(logfile, "r")
372 lines = []
373 while True:
374 l = f.readline()
375 if l == '':
376 break
377 l = l.rstrip()
378 if loglines:
379 lines.append(' | %s' % l)
380 if len(lines) > int(loglines):
381 lines.pop(0)
382 else:
383 print('| %s' % l)
384 f.close()
385 if lines:
386 for line in lines:
387 print(line)
388 if isinstance(event, bb.build.TaskBase):
389 logger.info(event._message)
390 continue
391 if isinstance(event, bb.event.ParseStarted):
392 if event.total == 0:
393 continue
394 parseprogress = new_progress("Parsing recipes", event.total).start()
395 continue
396 if isinstance(event, bb.event.ParseProgress):
397 parseprogress.update(event.current)
398 continue
399 if isinstance(event, bb.event.ParseCompleted):
400 if not parseprogress:
401 continue
402
403 parseprogress.finish()
404 print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
405 % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
406 continue
407
408 if isinstance(event, bb.event.CacheLoadStarted):
409 cacheprogress = new_progress("Loading cache", event.total).start()
410 continue
411 if isinstance(event, bb.event.CacheLoadProgress):
412 cacheprogress.update(event.current)
413 continue
414 if isinstance(event, bb.event.CacheLoadCompleted):
415 cacheprogress.finish()
416 print("Loaded %d entries from dependency cache." % event.num_entries)
417 continue
418
419 if isinstance(event, bb.command.CommandFailed):
420 return_value = event.exitcode
421 if event.error:
422 errors = errors + 1
423 logger.error("Command execution failed: %s", event.error)
424 main.shutdown = 2
425 continue
426 if isinstance(event, bb.command.CommandExit):
427 if not return_value:
428 return_value = event.exitcode
429 continue
430 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
431 main.shutdown = 2
432 continue
433 if isinstance(event, bb.event.MultipleProviders):
434 logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
435 event._item,
436 ", ".join(event._candidates))
437 logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
438 continue
439 if isinstance(event, bb.event.NoProvider):
440 return_value = 1
441 errors = errors + 1
442 if event._runtime:
443 r = "R"
444 else:
445 r = ""
446
447 extra = ''
448 if not event._reasons:
449 if event._close_matches:
450 extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
451
452 if event._dependees:
453 logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s", r, event._item, ", ".join(event._dependees), r, extra)
454 else:
455 logger.error("Nothing %sPROVIDES '%s'%s", r, event._item, extra)
456 if event._reasons:
457 for reason in event._reasons:
458 logger.error("%s", reason)
459 continue
460
461 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
462 logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
463 continue
464
465 if isinstance(event, bb.runqueue.runQueueTaskStarted):
466 if event.noexec:
467 tasktype = 'noexec task'
468 else:
469 tasktype = 'task'
470 logger.info("Running %s %s of %s (ID: %s, %s)",
471 tasktype,
472 event.stats.completed + event.stats.active +
473 event.stats.failed + 1,
474 event.stats.total, event.taskid, event.taskstring)
475 continue
476
477 if isinstance(event, bb.runqueue.runQueueTaskFailed):
478 taskfailures.append(event.taskstring)
479 logger.error("Task %s (%s) failed with exit code '%s'",
480 event.taskid, event.taskstring, event.exitcode)
481 continue
482
483 if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
484 logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
485 event.taskid, event.taskstring, event.exitcode)
486 continue
487
488 if isinstance(event, bb.event.DepTreeGenerated):
489 continue
490
491 # ignore
492 if isinstance(event, (bb.event.BuildBase,
493 bb.event.MetadataEvent,
494 bb.event.StampUpdate,
495 bb.event.ConfigParsed,
496 bb.event.RecipeParsed,
497 bb.event.RecipePreFinalise,
498 bb.runqueue.runQueueEvent,
499 bb.event.OperationStarted,
500 bb.event.OperationCompleted,
501 bb.event.OperationProgress,
502 bb.event.DiskFull)):
503 continue
504
505 logger.error("Unknown event: %s", event)
506
507 except EnvironmentError as ioerror:
508 termfilter.clearFooter()
509 # ignore interrupted io
510 if ioerror.args[0] == 4:
511 continue
512 sys.stderr.write(str(ioerror))
513 if not params.observe_only:
514 _, error = server.runCommand(["stateForceShutdown"])
515 main.shutdown = 2
516 except KeyboardInterrupt:
517 termfilter.clearFooter()
518 if params.observe_only:
519 print("\nKeyboard Interrupt, exiting observer...")
520 main.shutdown = 2
521 if not params.observe_only and main.shutdown == 1:
522 print("\nSecond Keyboard Interrupt, stopping...\n")
523 _, error = server.runCommand(["stateForceShutdown"])
524 if error:
525 logger.error("Unable to cleanly stop: %s" % error)
526 if not params.observe_only and main.shutdown == 0:
527 print("\nKeyboard Interrupt, closing down...\n")
528 interrupted = True
529 _, error = server.runCommand(["stateShutdown"])
530 if error:
531 logger.error("Unable to cleanly shutdown: %s" % error)
532 main.shutdown = main.shutdown + 1
533 pass
534 except Exception as e:
535 sys.stderr.write(str(e))
536 if not params.observe_only:
537 _, error = server.runCommand(["stateForceShutdown"])
538 main.shutdown = 2
539 summary = ""
540 if taskfailures:
541 summary += pluralise("\nSummary: %s task failed:",
542 "\nSummary: %s tasks failed:", len(taskfailures))
543 for failure in taskfailures:
544 summary += "\n %s" % failure
545 if warnings:
546 summary += pluralise("\nSummary: There was %s WARNING message shown.",
547 "\nSummary: There were %s WARNING messages shown.", warnings)
548 if return_value and errors:
549 summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
550 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
551 if summary:
552 print(summary)
553
554 if interrupted:
555 print("Execution was interrupted, returning a non-zero exit code.")
556 if return_value == 0:
557 return_value = 1
558
559 return return_value
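TerminalFilter.getTerminalColumns() above recovers the terminal width with the TIOCGWINSZ ioctl, falling back to the environment and finally to a default. The same probe in isolation, assuming a Unix-like system (a sketch of the idea, not knotty's exact code path):

    import fcntl
    import os
    import struct
    import sys
    import termios

    def terminal_columns(default=80):
        # Ask the kernel for (rows, cols) of the terminal behind stdout.
        try:
            rows, cols = struct.unpack(
                'hh', fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, b'1234'))
            return cols
        except (IOError, OSError):
            pass
        # Fall back to $COLUMNS, then to the default width.
        try:
            return int(os.environ['COLUMNS'])
        except (KeyError, ValueError):
            return default

    print(terminal_columns())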
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
new file mode 100644
index 0000000000..b6c20ec388
--- /dev/null
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -0,0 +1,373 @@
1#
2# BitBake Curses UI Implementation
3#
4# Implements an ncurses frontend for the BitBake utility.
5#
6# Copyright (C) 2006 Michael 'Mickey' Lauer
7# Copyright (C) 2006-2007 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22"""
23 We have the following windows:
24
25 1.) Main Window: Shows what we are ultimately building and how far we are. Includes status bar
26 2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
27 3.) Command Line Window: Contains an interactive command line where you can interact w/ Bitbake.
28
29 Basic window layout is like this:
30
31 |---------------------------------------------------------|
32 | <Main Window> | <Thread Activity Window> |
33 | | 0: foo do_compile complete|
34 | Building Gtk+-2.6.10 | 1: bar do_patch complete |
35 | Status: 60% | ... |
36 | | ... |
37 | | ... |
38 |---------------------------------------------------------|
39 |<Command Line Window> |
40 |>>> which virtual/kernel |
41 |openzaurus-kernel |
42 |>>> _ |
43 |---------------------------------------------------------|
44
45"""
46
47
48from __future__ import division
49import logging
50import os, sys, itertools, time, subprocess
51
52try:
53 import curses
54except ImportError:
55 sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
56
57import bb
58import xmlrpclib
59from bb import ui
60from bb.ui import uihelper
61
62parsespin = itertools.cycle( r'|/-\\' )
63
64X = 0
65Y = 1
66WIDTH = 2
67HEIGHT = 3
68
69MAXSTATUSLENGTH = 32
70
71class NCursesUI:
72 """
73 NCurses UI Class
74 """
75 class Window:
76 """Base Window Class"""
77 def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
78 self.win = curses.newwin( height, width, y, x )
79 self.dimensions = ( x, y, width, height )
80 """
81 if curses.has_colors():
82 color = 1
83 curses.init_pair( color, fg, bg )
84 self.win.bkgdset( ord(' '), curses.color_pair(color) )
85 else:
86 self.win.bkgdset( ord(' '), curses.A_BOLD )
87 """
88 self.erase()
89 self.setScrolling()
90 self.win.noutrefresh()
91
92 def erase( self ):
93 self.win.erase()
94
95 def setScrolling( self, b = True ):
96 self.win.scrollok( b )
97 self.win.idlok( b )
98
99 def setBoxed( self ):
100 self.boxed = True
101 self.win.box()
102 self.win.noutrefresh()
103
104 def setText( self, x, y, text, *args ):
105 self.win.addstr( y, x, text, *args )
106 self.win.noutrefresh()
107
108 def appendText( self, text, *args ):
109 self.win.addstr( text, *args )
110 self.win.noutrefresh()
111
112 def drawHline( self, y ):
113 self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
114 self.win.noutrefresh()
115
116 class DecoratedWindow( Window ):
117 """Base class for windows with a box and a title bar"""
118 def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
119 NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
120 self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
121 self.decoration.setBoxed()
122 self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
123 self.setTitle( title )
124
125 def setTitle( self, title ):
126 self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
127
128 #-------------------------------------------------------------------------#
129# class TitleWindow( Window ):
130 #-------------------------------------------------------------------------#
131# """Title Window"""
132# def __init__( self, x, y, width, height ):
133# NCursesUI.Window.__init__( self, x, y, width, height )
134# version = bb.__version__
135# title = "BitBake %s" % version
136# credit = "(C) 2003-2007 Team BitBake"
137# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
138# self.win.border()
139# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
140# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
141
142 #-------------------------------------------------------------------------#
143 class ThreadActivityWindow( DecoratedWindow ):
144 #-------------------------------------------------------------------------#
145 """Thread Activity Window"""
146 def __init__( self, x, y, width, height ):
147 NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
148
149 def setStatus( self, thread, text ):
150 line = "%02d: %s" % ( thread, text )
151 width = self.dimensions[WIDTH]
152 if ( len(line) > width ):
153 line = line[:width-3] + "..."
154 else:
155 line = line.ljust( width )
156 self.setText( 0, thread, line )
157
158 #-------------------------------------------------------------------------#
159 class MainWindow( DecoratedWindow ):
160 #-------------------------------------------------------------------------#
161 """Main Window"""
162 def __init__( self, x, y, width, height ):
163 self.StatusPosition = width - MAXSTATUSLENGTH
164 NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
165 curses.nl()
166
167 def setTitle( self, title ):
168 title = "BitBake %s" % bb.__version__
169 self.decoration.setText( 2, 1, title, curses.A_BOLD )
170 self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
171
172 def setStatus(self, status):
173 while len(status) < MAXSTATUSLENGTH:
174 status = status + " "
175 self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
176
177
178 #-------------------------------------------------------------------------#
179 class ShellOutputWindow( DecoratedWindow ):
180 #-------------------------------------------------------------------------#
181 """Interactive Command Line Output"""
182 def __init__( self, x, y, width, height ):
183 NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
184
185 #-------------------------------------------------------------------------#
186 class ShellInputWindow( Window ):
187 #-------------------------------------------------------------------------#
188 """Interactive Command Line Input"""
189 def __init__( self, x, y, width, height ):
190 NCursesUI.Window.__init__( self, x, y, width, height )
191
192# TODO: move this import to the top of the file: from curses.textpad import Textbox
193# self.textbox = Textbox( self.win )
194# t = threading.Thread()
195# t.run = self.textbox.edit
196# t.start()
197
198 #-------------------------------------------------------------------------#
199 def main(self, stdscr, server, eventHandler, params):
200 #-------------------------------------------------------------------------#
201 height, width = stdscr.getmaxyx()
202
203 # for now split it like that:
204 # MAIN_y + THREAD_y = 2/3 screen at the top
205 # MAIN_x = 2/3 left, THREAD_y = 1/3 right
206 # CLI_y = 1/3 of screen at the bottom
207 # CLI_x = full
208
209 main_left = 0
210 main_top = 0
211 main_height = ( height // 3 * 2 )
212 main_width = ( width // 3 ) * 2
213 clo_left = main_left
214 clo_top = main_top + main_height
215 clo_height = height - main_height - main_top - 1
216 clo_width = width
217 cli_left = main_left
218 cli_top = clo_top + clo_height
219 cli_height = 1
220 cli_width = width
221 thread_left = main_left + main_width
222 thread_top = main_top
223 thread_height = main_height
224 thread_width = width - main_width
225
226 #tw = self.TitleWindow( 0, 0, width, main_top )
227 mw = self.MainWindow( main_left, main_top, main_width, main_height )
228 taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
229 clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
230 cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
231 cli.setText( 0, 0, "BB>" )
232
233 mw.setStatus("Idle")
234
235 helper = uihelper.BBUIHelper()
236 shutdown = 0
237
238 try:
239 params.updateFromServer(server)
240 cmdline = params.parseActions()
241 if not cmdline:
242 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
243 return 1
244 if 'msg' in cmdline and cmdline['msg']:
245                print(cmdline['msg'])
246 return 1
247 cmdline = cmdline['action']
248 ret, error = server.runCommand(cmdline)
249 if error:
250 print("Error running command '%s': %s" % (cmdline, error))
251 return
252 elif ret != True:
253                print("Couldn't get default commandline! %s" % ret)
254 return
255 except xmlrpclib.Fault as x:
256 print("XMLRPC Fault getting commandline:\n %s" % x)
257 return
258
259 exitflag = False
260 while not exitflag:
261 try:
262 event = eventHandler.waitEvent(0.25)
263 if not event:
264 continue
265
266 helper.eventHandler(event)
267 if isinstance(event, bb.build.TaskBase):
268 mw.appendText("NOTE: %s\n" % event._message)
269 if isinstance(event, logging.LogRecord):
270 mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
271
272 if isinstance(event, bb.event.CacheLoadStarted):
273 self.parse_total = event.total
274 if isinstance(event, bb.event.CacheLoadProgress):
275 x = event.current
276 y = self.parse_total
277 mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
278 if isinstance(event, bb.event.CacheLoadCompleted):
279 mw.setStatus("Idle")
280 mw.appendText("Loaded %d entries from dependency cache.\n"
281 % ( event.num_entries))
282
283 if isinstance(event, bb.event.ParseStarted):
284 self.parse_total = event.total
285 if isinstance(event, bb.event.ParseProgress):
286 x = event.current
287 y = self.parse_total
288 mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
289 if isinstance(event, bb.event.ParseCompleted):
290 mw.setStatus("Idle")
291 mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
292 % ( event.cached, event.parsed, event.skipped, event.masked ))
293
294# if isinstance(event, bb.build.TaskFailed):
295# if event.logfile:
296# if data.getVar("BBINCLUDELOGS", d):
297# bb.error("log data follows (%s)" % logfile)
298# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
299# if number_of_lines:
300# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
301# else:
302# f = open(logfile, "r")
303# while True:
304# l = f.readline()
305# if l == '':
306# break
307# l = l.rstrip()
308# print '| %s' % l
309# f.close()
310# else:
311# bb.error("see log in %s" % logfile)
312
313 if isinstance(event, bb.command.CommandCompleted):
314 # stop so the user can see the result of the build, but
315 # also allow them to now exit with a single ^C
316 shutdown = 2
317 if isinstance(event, bb.command.CommandFailed):
318 mw.appendText("Command execution failed: %s" % event.error)
319 time.sleep(2)
320 exitflag = True
321 if isinstance(event, bb.command.CommandExit):
322 exitflag = True
323 if isinstance(event, bb.cooker.CookerExit):
324 exitflag = True
325
326 if isinstance(event, bb.event.LogExecTTY):
327 mw.appendText('WARN: ' + event.msg + '\n')
328 if helper.needUpdate:
329 activetasks, failedtasks = helper.getTasks()
330 taw.erase()
331 taw.setText(0, 0, "")
332 if activetasks:
333 taw.appendText("Active Tasks:\n")
334 for task in activetasks.itervalues():
335 taw.appendText(task["title"] + '\n')
336 if failedtasks:
337 taw.appendText("Failed Tasks:\n")
338 for task in failedtasks:
339 taw.appendText(task["title"] + '\n')
340
341 curses.doupdate()
342 except EnvironmentError as ioerror:
343 # ignore interrupted io
344 if ioerror.args[0] == 4:
345 pass
346
347 except KeyboardInterrupt:
348 if shutdown == 2:
349 mw.appendText("Third Keyboard Interrupt, exit.\n")
350 exitflag = True
351 if shutdown == 1:
352 mw.appendText("Second Keyboard Interrupt, stopping...\n")
353 _, error = server.runCommand(["stateForceShutdown"])
354 if error:
355 print("Unable to cleanly stop: %s" % error)
356 if shutdown == 0:
357 mw.appendText("Keyboard Interrupt, closing down...\n")
358 _, error = server.runCommand(["stateShutdown"])
359 if error:
360 print("Unable to cleanly shutdown: %s" % error)
361 shutdown = shutdown + 1
362 pass
363
364def main(server, eventHandler, params):
365 if not os.isatty(sys.stdout.fileno()):
366 print("FATAL: Unable to run 'ncurses' UI without a TTY.")
367 return
368 ui = NCursesUI()
369 try:
370        curses.wrapper(ui.main, server, eventHandler, params)
371 except:
372 import traceback
373 traceback.print_exc()
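NCursesUI.main() carves the screen up with plain integer arithmetic: the main and thread-activity windows share the top two thirds (split 2:1 horizontally), the shell output window takes the remaining rows minus one, and a single-line input row sits at the bottom. The geometry on its own, without any curses calls:

    def layout(height, width):
        # Top block: main window (left 2/3) and thread activity (right 1/3).
        main_height = height // 3 * 2
        main_width = (width // 3) * 2
        thread_width = width - main_width
        # Bottom block: shell output plus a one-line command input row.
        out_height = height - main_height - 1
        return {
            'main':      (0, 0, main_width, main_height),
            'threads':   (main_width, 0, thread_width, main_height),
            'shell_out': (0, main_height, width, out_height),
            'shell_in':  (0, main_height + out_height, width, 1),
        }

    print(layout(24, 80))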
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
new file mode 100644
index 0000000000..3ce4590c16
--- /dev/null
+++ b/bitbake/lib/bb/ui/puccho.py
@@ -0,0 +1,425 @@
1#
2# BitBake Graphical GTK User Interface
3#
4# Copyright (C) 2008 Intel Corporation
5#
6# Authored by Rob Bradford <rob@linux.intel.com>
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21import gtk
22import gobject
23import gtk.glade
24import threading
25import urllib2
26import os
27import contextlib
28
29from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
30from bb.ui.crumbs.buildmanager import BuildManagerTreeView
31
32from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView
33
34# The metadata loader is used by the BuildSetupDialog to download the
35# available options to populate the dialog
36class MetaDataLoader(gobject.GObject):
37 """ This class provides the mechanism for loading the metadata (the
38 fetching and parsing) from a given URL. The metadata encompasses details
39 on what machines are available. The distribution and images available for
40    the machine and the URIs to use for building the given machine."""
41 __gsignals__ = {
42 'success' : (gobject.SIGNAL_RUN_LAST,
43 gobject.TYPE_NONE,
44 ()),
45 'error' : (gobject.SIGNAL_RUN_LAST,
46 gobject.TYPE_NONE,
47 (gobject.TYPE_STRING,))
48 }
49
50 # We use these little helper functions to ensure that we take the gdk lock
51 # when emitting the signal. These functions are called as idles (so that
52    # they happen in the gtk / main thread's main loop).
53 def emit_error_signal (self, remark):
54 gtk.gdk.threads_enter()
55 self.emit ("error", remark)
56 gtk.gdk.threads_leave()
57
58 def emit_success_signal (self):
59 gtk.gdk.threads_enter()
60 self.emit ("success")
61 gtk.gdk.threads_leave()
62
63 def __init__ (self):
64 gobject.GObject.__init__ (self)
65
66 class LoaderThread(threading.Thread):
67 """ This class provides an asynchronous loader for the metadata (by
68 using threads and signals). This is useful since the metadata may be
69 at a remote URL."""
70 class LoaderImportException (Exception):
71 pass
72
73 def __init__(self, loader, url):
74 threading.Thread.__init__ (self)
75 self.url = url
76 self.loader = loader
77
78 def run (self):
79 result = {}
80 try:
81 with contextlib.closing (urllib2.urlopen (self.url)) as f:
82 # Parse the metadata format. The format is....
83 # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
84 for line in f:
85 components = line.split(";")
86 if (len (components) < 4):
87 raise MetaDataLoader.LoaderThread.LoaderImportException
88 machine = components[0]
89 distros = components[1].split("|")
90 images = components[2].split("|")
91 urls = components[3].split("|")
92
93 result[machine] = (distros, images, urls)
94
95 # Create an object representing this *potential*
96 # configuration. It can become concrete if the machine, distro
97 # and image are all chosen in the UI
98 configuration = BuildConfiguration()
99 configuration.metadata_url = self.url
100 configuration.machine_options = result
101 self.loader.configuration = configuration
102
103 # Emit that we've actually got a configuration
104 gobject.idle_add (MetaDataLoader.emit_success_signal,
105 self.loader)
106
107 except MetaDataLoader.LoaderThread.LoaderImportException as e:
108 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
109 "Repository metadata corrupt")
110 except Exception as e:
111 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
112 "Unable to download repository metadata")
113 print(e)
114
115 def try_fetch_from_url (self, url):
116 # Try and download the metadata. Firing a signal if successful
117 thread = MetaDataLoader.LoaderThread(self, url)
118 thread.start()
119
120class BuildSetupDialog (gtk.Dialog):
121 RESPONSE_BUILD = 1
122
123 # A little helper method that just sets the states on the widgets based on
124 # whether we've got good metadata or not.
125 def set_configurable (self, configurable):
126 if (self.configurable == configurable):
127 return
128
129 self.configurable = configurable
130 for widget in self.conf_widgets:
131 widget.set_sensitive (configurable)
132
133 if not configurable:
134 self.machine_combo.set_active (-1)
135 self.distribution_combo.set_active (-1)
136 self.image_combo.set_active (-1)
137
138 # GTK widget callbacks
139 def refresh_button_clicked (self, button):
140 # Refresh button clicked.
141
142 url = self.location_entry.get_chars (0, -1)
143 self.loader.try_fetch_from_url(url)
144
145 def repository_entry_editable_changed (self, entry):
146 if (len (entry.get_chars (0, -1)) > 0):
147 self.refresh_button.set_sensitive (True)
148 else:
149 self.refresh_button.set_sensitive (False)
150 self.clear_status_message()
151
152 # If we were previously configurable we are no longer since the
153 # location entry has been changed
154 self.set_configurable (False)
155
156 def machine_combo_changed (self, combobox):
157 active_iter = combobox.get_active_iter()
158
159 if not active_iter:
160 return
161
162 model = combobox.get_model()
163
164 if model:
165 chosen_machine = model.get (active_iter, 0)[0]
166
167 (distros_model, images_model) = \
168 self.loader.configuration.get_distro_and_images_models (chosen_machine)
169
170 self.distribution_combo.set_model (distros_model)
171 self.image_combo.set_model (images_model)
172
173 # Callbacks from the loader
174 def loader_success_cb (self, loader):
175 self.status_image.set_from_icon_name ("info",
176 gtk.ICON_SIZE_BUTTON)
177 self.status_image.show()
178 self.status_label.set_label ("Repository metadata successfully downloaded")
179
180 # Set the models on the combo boxes based on the models generated from
181 # the configuration that the loader has created
182
183 # We just need to set the machine here, that then determines the
184 # distro and image options. Cunning huh? :-)
185
186 self.configuration = self.loader.configuration
187 model = self.configuration.get_machines_model ()
188 self.machine_combo.set_model (model)
189
190 self.set_configurable (True)
191
192 def loader_error_cb (self, loader, message):
193 self.status_image.set_from_icon_name ("error",
194 gtk.ICON_SIZE_BUTTON)
195 self.status_image.show()
196 self.status_label.set_text ("Error downloading repository metadata")
197 for widget in self.conf_widgets:
198 widget.set_sensitive (False)
199
200 def clear_status_message (self):
201 self.status_image.hide()
202 self.status_label.set_label (
203 """<i>Enter the repository location and press _Refresh</i>""")
204
205 def __init__ (self):
206 gtk.Dialog.__init__ (self)
207
208 # Cancel
209 self.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
210
211 # Build
212 button = gtk.Button ("_Build", None, True)
213 image = gtk.Image ()
214 image.set_from_stock (gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
215 button.set_image (image)
216 self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
217 button.show_all ()
218
219 # Pull in *just* the table from the Glade XML data.
220 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
221 root = "build_table")
222 table = gxml.get_widget ("build_table")
223 self.vbox.pack_start (table, True, False, 0)
224
225 # Grab all the widgets that we need to turn on/off when we refresh...
226 self.conf_widgets = []
227 self.conf_widgets += [gxml.get_widget ("machine_label")]
228 self.conf_widgets += [gxml.get_widget ("distribution_label")]
229 self.conf_widgets += [gxml.get_widget ("image_label")]
230 self.conf_widgets += [gxml.get_widget ("machine_combo")]
231 self.conf_widgets += [gxml.get_widget ("distribution_combo")]
232 self.conf_widgets += [gxml.get_widget ("image_combo")]
233
234 # Grab the status widgets
235 self.status_image = gxml.get_widget ("status_image")
236 self.status_label = gxml.get_widget ("status_label")
237
238 # Grab the refresh button and connect to the clicked signal
239 self.refresh_button = gxml.get_widget ("refresh_button")
240 self.refresh_button.connect ("clicked", self.refresh_button_clicked)
241
242 # Grab the location entry and connect to editable::changed
243 self.location_entry = gxml.get_widget ("location_entry")
244 self.location_entry.connect ("changed",
245 self.repository_entry_editable_changed)
246
247 # Grab the machine combo and hook onto the changed signal. This then
248 # allows us to populate the distro and image combos
249 self.machine_combo = gxml.get_widget ("machine_combo")
250 self.machine_combo.connect ("changed", self.machine_combo_changed)
251
252 # Setup the combo
253 cell = gtk.CellRendererText()
254 self.machine_combo.pack_start(cell, True)
255 self.machine_combo.add_attribute(cell, 'text', 0)
256
257 # Grab the distro and image combos. We need these to populate with
258 # models once the machine is chosen
259 self.distribution_combo = gxml.get_widget ("distribution_combo")
260 cell = gtk.CellRendererText()
261 self.distribution_combo.pack_start(cell, True)
262 self.distribution_combo.add_attribute(cell, 'text', 0)
263
264 self.image_combo = gxml.get_widget ("image_combo")
265 cell = gtk.CellRendererText()
266 self.image_combo.pack_start(cell, True)
267 self.image_combo.add_attribute(cell, 'text', 0)
268
269 # Put the default descriptive text in the status box
270 self.clear_status_message()
271
272        # Mark as non-configurable; this just greys out the widgets the
273        # user can't use yet
274 self.configurable = False
275 self.set_configurable(False)
276
277 # Show the table
278 table.show_all ()
279
280 # The loader and some signals connected to it to update the status
281 # area
282 self.loader = MetaDataLoader()
283 self.loader.connect ("success", self.loader_success_cb)
284 self.loader.connect ("error", self.loader_error_cb)
285
286 def update_configuration (self):
287        """ Update the internal configuration from the widget state. Once the
288        machine, distro and image are chosen this makes the configuration
289        concrete so it can be used for building. """
290 # Extract the chosen machine from the combo
291 model = self.machine_combo.get_model()
292 active_iter = self.machine_combo.get_active_iter()
293 if (active_iter):
294 self.configuration.machine = model.get(active_iter, 0)[0]
295
296 # Extract the chosen distro from the combo
297 model = self.distribution_combo.get_model()
298 active_iter = self.distribution_combo.get_active_iter()
299 if (active_iter):
300 self.configuration.distro = model.get(active_iter, 0)[0]
301
302 # Extract the chosen image from the combo
303 model = self.image_combo.get_model()
304 active_iter = self.image_combo.get_active_iter()
305 if (active_iter):
306 self.configuration.image = model.get(active_iter, 0)[0]
307
308# This function pulls events out of the event queue and pushes them into the
309# RunningBuild, which in turn updates the model behind the progress tree
310# view.
311#
312# TODO: Should be a method on the RunningBuild class
313def event_handle_timeout (eventHandler, build):
314 # Consume as many messages as we can ...
315 event = eventHandler.getEvent()
316 while event:
317 build.handle_event (event)
318 event = eventHandler.getEvent()
319 return True
320
321class MainWindow (gtk.Window):
322
323 # Callback that gets fired when the user hits a button in the
324 # BuildSetupDialog.
325 def build_dialog_box_response_cb (self, dialog, response_id):
326 conf = None
327 if (response_id == BuildSetupDialog.RESPONSE_BUILD):
328 dialog.update_configuration()
329 print(dialog.configuration.machine, dialog.configuration.distro, \
330 dialog.configuration.image)
331 conf = dialog.configuration
332
333 dialog.destroy()
334
335 if conf:
336 self.manager.do_build (conf)
337
338 def build_button_clicked_cb (self, button):
339 dialog = BuildSetupDialog ()
340
341 # For some unknown reason Dialog.run causes nice little deadlocks ... :-(
342 dialog.connect ("response", self.build_dialog_box_response_cb)
343 dialog.show()
344
345 def __init__ (self):
346 gtk.Window.__init__ (self)
347
348 # Pull in *just* the main vbox from the Glade XML data and then pack
349 # that inside the window
350 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
351 root = "main_window_vbox")
352 vbox = gxml.get_widget ("main_window_vbox")
353 self.add (vbox)
354
355 # Create the tree views for the build manager view and the progress view
356 self.build_manager_view = BuildManagerTreeView()
357 self.running_build_view = RunningBuildTreeView()
358
359 # Grab the scrolled windows that we put the tree views into
360 self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
361 self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
362
363 # Put the tree views inside ...
364 self.results_scrolledwindow.add (self.build_manager_view)
365 self.progress_scrolledwindow.add (self.running_build_view)
366
367 # Hook up the build button...
368 self.build_button = gxml.get_widget ("main_toolbutton_build")
369 self.build_button.connect ("clicked", self.build_button_clicked_cb)
370
371# I'm not very happy about the current ownership of the RunningBuild. I have
372# my suspicions that this object should be held by the BuildManager since we
373# care about the signals in the manager
374
375def running_build_succeeded_cb (running_build, manager):
376 # Notify the manager that a build has succeeded. This is necessary as part
377 # of the 'hack' that we use for making the row in the model / view
378 # representing the ongoing build change into a row representing the
379    # completed build. Since we know only one build can be running at a time,
380    # we can handle this.
381
382 # FIXME: Refactor all this so that the RunningBuild is owned by the
383 # BuildManager. It can then hook onto the signals directly and drive
384 # interesting things it cares about.
385 manager.notify_build_succeeded ()
386 print("build succeeded")
387
388def running_build_failed_cb (running_build, manager):
389 # As above
390 print("build failed")
391 manager.notify_build_failed ()
392
393def main (server, eventHandler):
394 # Initialise threading...
395 gobject.threads_init()
396 gtk.gdk.threads_init()
397
398 main_window = MainWindow ()
399 main_window.show_all ()
400
401 # Set up the build manager stuff in general
402 builds_dir = os.path.join (os.getcwd(), "results")
403 manager = BuildManager (server, builds_dir)
404 main_window.build_manager_view.set_model (manager.model)
405
406 # Do the running build setup
407 running_build = RunningBuild ()
408 main_window.running_build_view.set_model (running_build.model)
409 running_build.connect ("build-succeeded", running_build_succeeded_cb,
410 manager)
411 running_build.connect ("build-failed", running_build_failed_cb, manager)
412
413 # We need to save the manager into the MainWindow so that the toolbar
414 # button can use it.
415 # FIXME: Refactor ?
416 main_window.manager = manager
417
418 # Use a timeout function for probing the event queue to find out if we
419 # have a message waiting for us.
420 gobject.timeout_add (200,
421 event_handle_timeout,
422 eventHandler,
423 running_build)
424
425 gtk.main()
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
new file mode 100644
index 0000000000..1626aa6c34
--- /dev/null
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -0,0 +1,312 @@
1#
2# BitBake ToasterUI Implementation
3# based on (No)TTY UI Implementation by Richard Purdie
4#
5# Handling output to TTYs or files (no TTY)
6#
7# Copyright (C) 2006-2012 Richard Purdie
8# Copyright (C) 2013 Intel Corporation
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23from __future__ import division
24try:
25 import bb
26except RuntimeError as exc:
27 sys.exit(str(exc))
28
29from bb.ui import uihelper
30from bb.ui.buildinfohelper import BuildInfoHelper
31
32import bb.msg
33import copy
34import fcntl
35import logging
36import os
37import progressbar
38import signal
39import struct
40import sys
41import time
42import xmlrpclib
43
44featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
45
46logger = logging.getLogger("BitBake")
47interactive = sys.stdout.isatty()
48
49
50
51def _log_settings_from_server(server):
52 # Get values of variables which control our output
53 includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
54 if error:
55 logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
56 raise BaseException(error)
57 loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
58 if error:
59 logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
60 raise BaseException(error)
61 return includelogs, loglines
62
63def main(server, eventHandler, params ):
64
65 includelogs, loglines = _log_settings_from_server(server)
66
67 # verify and warn
68 build_history_enabled = True
69 inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
70 if not "buildhistory" in inheritlist.split(" "):
71 logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
72 build_history_enabled = False
73
74 helper = uihelper.BBUIHelper()
75
76 console = logging.StreamHandler(sys.stdout)
77 format_str = "%(levelname)s: %(message)s"
78 format = bb.msg.BBLogFormatter(format_str)
79 bb.msg.addDefaultlogFilter(console)
80 console.setFormatter(format)
81 logger.addHandler(console)
82
83 if not params.observe_only:
84 logger.error("ToasterUI can only work in observer mode")
85 return
86
87
88 main.shutdown = 0
89 interrupted = False
90 return_value = 0
91 errors = 0
92 warnings = 0
93 taskfailures = []
94 first = True
95
96 buildinfohelper = BuildInfoHelper(server, build_history_enabled)
97
98 while True:
99 try:
100 event = eventHandler.waitEvent(0.25)
101 if first:
102 first = False
103 logger.info("ToasterUI waiting for events")
104
105 if event is None:
106 if main.shutdown > 0:
107 break
108 continue
109
110 helper.eventHandler(event)
111
112 if isinstance(event, bb.event.BuildStarted):
113 buildinfohelper.store_started_build(event)
114
115 if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
116 buildinfohelper.update_and_store_task(event)
117 continue
118
119 if isinstance(event, bb.event.LogExecTTY):
120 logger.warn(event.msg)
121 continue
122
123 if isinstance(event, logging.LogRecord):
124 buildinfohelper.store_log_event(event)
125 if event.levelno >= format.ERROR:
126 errors = errors + 1
127 return_value = 1
128 elif event.levelno == format.WARNING:
129 warnings = warnings + 1
130 # For "normal" logging conditions, don't show note logs from tasks
131 # but do show them if the user has changed the default log level to
132 # include verbose/debug messages
133 if event.taskpid != 0 and event.levelno <= format.NOTE:
134 continue
135
136 logger.handle(event)
137 continue
138
139 if isinstance(event, bb.build.TaskFailed):
140 buildinfohelper.update_and_store_task(event)
141 return_value = 1
142 logfile = event.logfile
143 if logfile and os.path.exists(logfile):
144 bb.error("Logfile of failure stored in: %s" % logfile)
145 continue
146
147 # these events are unprocessed now, but may be used in the future to log
148            # timing and error information from the parsing phase in Toaster
149 if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
150 continue
151 if isinstance(event, bb.event.ParseStarted):
152 continue
153 if isinstance(event, bb.event.ParseProgress):
154 continue
155 if isinstance(event, bb.event.ParseCompleted):
156 continue
157 if isinstance(event, bb.event.CacheLoadStarted):
158 continue
159 if isinstance(event, bb.event.CacheLoadProgress):
160 continue
161 if isinstance(event, bb.event.CacheLoadCompleted):
162 continue
163 if isinstance(event, bb.event.MultipleProviders):
164 continue
165 if isinstance(event, bb.event.NoProvider):
166 return_value = 1
167 errors = errors + 1
168 if event._runtime:
169 r = "R"
170 else:
171 r = ""
172
173 if event._dependees:
174 text = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
175 else:
176 text = "Nothing %sPROVIDES '%s'" % (r, event._item)
177
178 logger.error(text)
179 if event._reasons:
180 for reason in event._reasons:
181 logger.error("%s", reason)
182 text += reason
183 buildinfohelper.store_log_error(text)
184 continue
185
186 if isinstance(event, bb.event.ConfigParsed):
187 continue
188 if isinstance(event, bb.event.RecipeParsed):
189 continue
190
191 # end of saved events
192
193 if isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped)):
194 buildinfohelper.store_started_task(event)
195 continue
196
197 if isinstance(event, bb.runqueue.runQueueTaskCompleted):
198 buildinfohelper.update_and_store_task(event)
199 continue
200
201 if isinstance(event, bb.runqueue.runQueueTaskFailed):
202 buildinfohelper.update_and_store_task(event)
203 taskfailures.append(event.taskstring)
204 logger.error("Task %s (%s) failed with exit code '%s'",
205 event.taskid, event.taskstring, event.exitcode)
206 continue
207
208 if isinstance(event, (bb.runqueue.sceneQueueTaskCompleted, bb.runqueue.sceneQueueTaskFailed)):
209 buildinfohelper.update_and_store_task(event)
210 continue
211
212
213 if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
214 continue
215
216 if isinstance(event, (bb.event.BuildCompleted)):
217 continue
218
219 if isinstance(event, (bb.command.CommandCompleted,
220 bb.command.CommandFailed,
221 bb.command.CommandExit)):
222 if (isinstance(event, bb.command.CommandFailed)):
223 event.levelno = format.ERROR
224 event.msg = "Command Failed " + event.error
225 event.pathname = ""
226 event.lineno = 0
227 buildinfohelper.store_log_event(event)
228 errors += 1
229
230 buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
231 buildinfohelper.close()
232
233
234 # we start a new build info
235 if buildinfohelper.brbe is not None:
236
237 print "we are under BuildEnvironment management - after the build, we exit"
238 server.terminateServer()
239 else:
240 print "prepared for new build"
241 errors = 0
242 warnings = 0
243 taskfailures = []
244 buildinfohelper = BuildInfoHelper(server, build_history_enabled)
245
246 continue
247
248 if isinstance(event, bb.event.MetadataEvent):
249 if event.type == "SinglePackageInfo":
250 buildinfohelper.store_build_package_information(event)
251 elif event.type == "LayerInfo":
252 buildinfohelper.store_layer_info(event)
253 elif event.type == "BuildStatsList":
254 buildinfohelper.store_tasks_stats(event)
255 elif event.type == "ImagePkgList":
256 buildinfohelper.store_target_package_data(event)
257 elif event.type == "MissedSstate":
258 buildinfohelper.store_missed_state_tasks(event)
259 elif event.type == "ImageFileSize":
260 buildinfohelper.update_target_image_file(event)
261 elif event.type == "LicenseManifestPath":
262 buildinfohelper.store_license_manifest_path(event)
263 continue
264
265 if isinstance(event, bb.cooker.CookerExit):
266 # exit when the server exits
267 break
268
269 # ignore
270 if isinstance(event, (bb.event.BuildBase,
271 bb.event.StampUpdate,
272 bb.event.RecipePreFinalise,
273 bb.runqueue.runQueueEvent,
274 bb.runqueue.runQueueExitWait,
275 bb.event.OperationProgress,
276 bb.command.CommandFailed,
277 bb.command.CommandExit,
278 bb.command.CommandCompleted)):
279 continue
280
281 if isinstance(event, bb.event.DepTreeGenerated):
282 buildinfohelper.store_dependency_information(event)
283 continue
284
285 logger.error("Unknown event: %s", event)
286
287 except EnvironmentError as ioerror:
288 # ignore interrupted io
289 if ioerror.args[0] == 4:
290 pass
291 except KeyboardInterrupt:
292 main.shutdown = 1
293 pass
294 except Exception as e:
295 # print errors to log
296 import traceback
297 exception_data = traceback.format_exc()
298 logger.error("%s\n%s" % (e, exception_data))
299
300 # save them to database, if possible; if it fails, we already logged to console.
301 try:
302 buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
303 except Exception as ce:
304                logger.error("CRITICAL - Failed to save toaster exception to the database: %s" % str(ce))
305
306 pass
307
308 if interrupted:
309 if return_value == 0:
310 return_value = 1
311
312 return return_value
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
new file mode 100644
index 0000000000..c6b100c840
--- /dev/null
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -0,0 +1,150 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2007 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20
21"""
22Use this class to fork off a thread to receive event callbacks from the bitbake
23server and queue them for the UI to process. This mechanism must be used to avoid
24client/server deadlocks.
25"""
26
27import bb, socket, threading, pickle
28from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
29
30class BBUIEventQueue:
31    def __init__(self, BBServer, clientinfo=("localhost", 0)):
32
33 self.eventQueue = []
34 self.eventQueueLock = threading.Lock()
35 self.eventQueueNotify = threading.Event()
36
37 self.BBServer = BBServer
38 self.clientinfo = clientinfo
39
40 server = UIXMLRPCServer(self.clientinfo)
41 self.host, self.port = server.socket.getsockname()
42
43 server.register_function( self.system_quit, "event.quit" )
44 server.register_function( self.send_event, "event.sendpickle" )
45 server.socket.settimeout(1)
46
47        self.EventHandle = None
48 count_tries = 0
49
50 # the event handler registration may fail here due to cooker being in invalid state
51 # this is a transient situation, and we should retry a couple of times before
52 # giving up
53
54        while self.EventHandle is None and count_tries < 5:
55 self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
56
57 if (self.EventHandle != None):
58 break
59
60 bb.warn("Could not register UI event handler %s:%d, retry" % (self.host, self.port))
61 count_tries += 1
62 import time
63 time.sleep(1)
64
65
66 if self.EventHandle == None:
67 raise Exception("Could not register UI event handler")
68
69 self.server = server
70
71 self.t = threading.Thread()
72 self.t.setDaemon(True)
73 self.t.run = self.startCallbackHandler
74 self.t.start()
75
76 def getEvent(self):
77
78 self.eventQueueLock.acquire()
79
80 if len(self.eventQueue) == 0:
81 self.eventQueueLock.release()
82 return None
83
84 item = self.eventQueue.pop(0)
85
86 if len(self.eventQueue) == 0:
87 self.eventQueueNotify.clear()
88
89 self.eventQueueLock.release()
90 return item
91
92 def waitEvent(self, delay):
93 self.eventQueueNotify.wait(delay)
94 return self.getEvent()
95
96 def queue_event(self, event):
97 self.eventQueueLock.acquire()
98 self.eventQueue.append(event)
99 self.eventQueueNotify.set()
100 self.eventQueueLock.release()
101
102 def send_event(self, event):
103 self.queue_event(pickle.loads(event))
104
105 def startCallbackHandler(self):
106
107 self.server.timeout = 1
108 while not self.server.quit:
109 self.server.handle_request()
110 self.server.server_close()
111
112 def system_quit( self ):
113 """
114 Shut down the callback thread
115 """
116 try:
117 self.BBServer.unregisterEventHandler(self.EventHandle)
118 except:
119 pass
120 self.server.quit = True
121
122class UIXMLRPCServer (SimpleXMLRPCServer):
123
124 def __init__( self, interface ):
125 self.quit = False
126 SimpleXMLRPCServer.__init__( self,
127 interface,
128 requestHandler=SimpleXMLRPCRequestHandler,
129 logRequests=False, allow_none=True)
130
131 def get_request(self):
132 while not self.quit:
133 try:
134 sock, addr = self.socket.accept()
135 sock.settimeout(1)
136 return (sock, addr)
137 except socket.timeout:
138 pass
139 return (None, None)
140
141 def close_request(self, request):
142 if request is None:
143 return
144 SimpleXMLRPCServer.close_request(self, request)
145
146 def process_request(self, request, client_address):
147 if request is None:
148 return
149 SimpleXMLRPCServer.process_request(self, request, client_address)
150
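# Illustrative only, not part of the original file: a minimal sketch of how a
# UI front end might drain this queue. 'server' is assumed to be an already
# connected BitBake server proxy; the function name is an example, not the
# real API of any particular front end.
import bb.cooker
from bb.ui.uievent import BBUIEventQueue

def drain_events(server):
    events = BBUIEventQueue(server)        # forks the listener thread
    try:
        while True:
            event = events.waitEvent(0.25)
            if event is None:
                continue
            if isinstance(event, bb.cooker.CookerExit):
                break                      # the server is going away
            print(event)
    finally:
        events.system_quit()               # unregister and stop the thread
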
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
new file mode 100644
index 0000000000..a703387fb8
--- /dev/null
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -0,0 +1,100 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
5# Copyright (C) 2006 - 2007 Richard Purdie
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import bb.build, bb.runqueue
21
22class BBUIHelper:
23 def __init__(self):
24 self.needUpdate = False
25 self.running_tasks = {}
26 # Running PIDs preserves the order tasks were executed in
27 self.running_pids = []
28 self.failed_tasks = []
29 self.tasknumber_current = 0
30 self.tasknumber_total = 0
31
32 def eventHandler(self, event):
33 if isinstance(event, bb.build.TaskStarted):
34 self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
35 self.running_pids.append(event.pid)
36 self.needUpdate = True
37 if isinstance(event, bb.build.TaskSucceeded):
38 del self.running_tasks[event.pid]
39 self.running_pids.remove(event.pid)
40 self.needUpdate = True
41 if isinstance(event, bb.build.TaskFailedSilent):
42 del self.running_tasks[event.pid]
43 self.running_pids.remove(event.pid)
44 # Don't add to the failed tasks list since this is e.g. a setscene task failure
45 self.needUpdate = True
46 if isinstance(event, bb.build.TaskFailed):
47 del self.running_tasks[event.pid]
48 self.running_pids.remove(event.pid)
49 self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
50 self.needUpdate = True
51 if isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
52 self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
53 self.tasknumber_total = event.stats.total
54 self.needUpdate = True
55
56 def getTasks(self):
57 self.needUpdate = False
58 return (self.running_tasks, self.failed_tasks)
59
60 def findServerDetails(self):
61 import sys
62 import optparse
63 from bb.server.xmlrpc import BitbakeServerInfo, BitBakeServerConnection
64 host = ""
65 port = 0
66 bind = ""
67 parser = optparse.OptionParser(
68 usage = """%prog -H host -P port -B bindaddr""")
69
70 parser.add_option("-H", "--host", help = "Bitbake server's IP address",
71 action = "store", dest = "host", default = None)
72
73 parser.add_option("-P", "--port", help = "Bitbake server's Port number",
74 action = "store", dest = "port", default = None)
75
76 parser.add_option("-B", "--bind", help = "Hob2 local bind address",
77 action = "store", dest = "bind", default = None)
78
79 options, args = parser.parse_args(sys.argv)
80 for key, val in options.__dict__.items():
81 if key == 'host' and val:
82 host = val
83 elif key == 'port' and val:
84 port = int(val)
85 elif key == 'bind' and val:
86 bind = val
87
88 if not host or not port or not bind:
89 parser.print_usage()
90 sys.exit(1)
91
92 serverinfo = BitbakeServerInfo(host, port)
93 clientinfo = (bind, 0)
94 connection = BitBakeServerConnection(serverinfo, clientinfo)
95
96 server = connection.connection
97 eventHandler = connection.events
98
99 return server, eventHandler, host, bind
100
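# Illustrative only, not part of the original file: a sketch of the intended
# usage pattern, where a UI feeds every event it receives into BBUIHelper and
# then queries it for a task snapshot. 'events' is assumed to be any iterable
# of BitBake events.
from bb.ui.uihelper import BBUIHelper

def track(events):
    helper = BBUIHelper()
    for event in events:
        helper.eventHandler(event)
        if helper.needUpdate:
            running, failed = helper.getTasks()
            print("%d running, %d failed, task %d of %d" % (
                len(running), len(failed),
                helper.tasknumber_current, helper.tasknumber_total))
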
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
new file mode 100644
index 0000000000..2562db8e47
--- /dev/null
+++ b/bitbake/lib/bb/utils.py
@@ -0,0 +1,916 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import subprocess
31import glob
32import traceback
33import errno
34from commands import getstatusoutput
35from contextlib import contextmanager
36
37logger = logging.getLogger("BitBake.Util")
38
39def clean_context():
40 return {
41 "os": os,
42 "bb": bb,
43 "time": time,
44 }
45
46def get_context():
47 return _context
48
49
50def set_context(ctx):
51 _context = ctx
52
53# Context used in better_exec, eval
54_context = clean_context()
55
56def explode_version(s):
57 r = []
58 alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
59 numeric_regexp = re.compile('^(\d+)(.*)$')
60 while (s != ''):
61 if s[0] in string.digits:
62 m = numeric_regexp.match(s)
63 r.append((0, int(m.group(1))))
64 s = m.group(2)
65 continue
66 if s[0] in string.letters:
67 m = alpha_regexp.match(s)
68 r.append((1, m.group(1)))
69 s = m.group(2)
70 continue
71 if s[0] == '~':
72 r.append((-1, s[0]))
73 else:
74 r.append((2, s[0]))
75 s = s[1:]
76 return r
77
78def split_version(s):
79 """Split a version string into its constituent parts (PE, PV, PR)"""
80 s = s.strip(" <>=")
81 e = 0
82 if s.count(':'):
83 e = int(s.split(":")[0])
84 s = s.split(":")[1]
85 r = ""
86 if s.count('-'):
87 r = s.rsplit("-", 1)[1]
88 s = s.rsplit("-", 1)[0]
89 v = s
90 return (e, v, r)
91
92def vercmp_part(a, b):
93 va = explode_version(a)
94 vb = explode_version(b)
95 while True:
96 if va == []:
97 (oa, ca) = (0, None)
98 else:
99 (oa, ca) = va.pop(0)
100 if vb == []:
101 (ob, cb) = (0, None)
102 else:
103 (ob, cb) = vb.pop(0)
104 if (oa, ca) == (0, None) and (ob, cb) == (0, None):
105 return 0
106 if oa < ob:
107 return -1
108 elif oa > ob:
109 return 1
110 elif ca < cb:
111 return -1
112 elif ca > cb:
113 return 1
114
115def vercmp(ta, tb):
116 (ea, va, ra) = ta
117 (eb, vb, rb) = tb
118
119 r = int(ea or 0) - int(eb or 0)
120 if (r == 0):
121 r = vercmp_part(va, vb)
122 if (r == 0):
123 r = vercmp_part(ra, rb)
124 return r
125
126def vercmp_string(a, b):
127 ta = split_version(a)
128 tb = split_version(b)
129 return vercmp(ta, tb)
130
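# Illustrative only, not part of the original file: a tiny self-check showing
# how the version helpers above compose. Defining it has no side effects.
def _version_helpers_example():
    assert split_version("2:1.2.3-r1") == (2, "1.2.3", "r1")  # (PE, PV, PR)
    assert vercmp_string("1.0", "1.1") < 0      # "1.0" sorts older than "1.1"
    assert vercmp_string("1.0-r1", "1.0") > 0   # a higher PR sorts newer
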
131def explode_deps(s):
132 """
133 Take an RDEPENDS style string of format:
134 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
135 and return a list of dependencies.
136 Version information is ignored.
137 """
138 r = []
139 l = s.split()
140 flag = False
141 for i in l:
142 if i[0] == '(':
143 flag = True
144 #j = []
145 if not flag:
146 r.append(i)
147 #else:
148 # j.append(i)
149 if flag and i.endswith(')'):
150 flag = False
151 # Ignore version
152 #r[-1] += ' ' + ' '.join(j)
153 return r
154
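# Illustrative only, not part of the original file: explode_deps() drops the
# version constraints and keeps just the dependency names.
def _explode_deps_example():
    deps = explode_deps("virtual/kernel glibc (>= 2.19) update-rc.d")
    assert deps == ["virtual/kernel", "glibc", "update-rc.d"]
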
155def explode_dep_versions2(s):
156 """
157 Take an RDEPENDS style string of format:
158 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
159 and return a dictionary of dependencies and versions.
160 """
161 r = {}
162 l = s.replace(",", "").split()
163 lastdep = None
164 lastcmp = ""
165 lastver = ""
166 incmp = False
167 inversion = False
168 for i in l:
169 if i[0] == '(':
170 incmp = True
171 i = i[1:].strip()
172 if not i:
173 continue
174
175 if incmp:
176 incmp = False
177 inversion = True
178 # This list is based on behavior and supported comparisons from deb, opkg and rpm.
179 #
180 # Even though =<, <<, ==, !=, =>, and >> may not be supported,
181 # we list each possibly valid item.
182 # The build system is responsible for validation of what it supports.
183 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
184 lastcmp = i[0:2]
185 i = i[2:]
186 elif i.startswith(('<', '>', '=')):
187 lastcmp = i[0:1]
188 i = i[1:]
189 else:
190 # This is an unsupported case!
191 lastcmp = (i or "")
192 i = ""
193            i = i.strip()
194 if not i:
195 continue
196
197 if inversion:
198 if i.endswith(')'):
199 i = i[:-1] or ""
200 inversion = False
201 if lastver and i:
202 lastver += " "
203 if i:
204 lastver += i
205 if lastdep not in r:
206 r[lastdep] = []
207 r[lastdep].append(lastcmp + " " + lastver)
208 continue
209
210 #if not inversion:
211 lastdep = i
212 lastver = ""
213 lastcmp = ""
214 if not (i in r and r[i]):
215 r[lastdep] = []
216
217 return r
218
219def explode_dep_versions(s):
220 r = explode_dep_versions2(s)
221 for d in r:
222 if not r[d]:
223 r[d] = None
224 continue
225 if len(r[d]) > 1:
226 bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
227 r[d] = r[d][0]
228 return r
229
230def join_deps(deps, commasep=True):
231 """
232 Take the result from explode_dep_versions and generate a dependency string
233 """
234 result = []
235 for dep in deps:
236 if deps[dep]:
237 if isinstance(deps[dep], list):
238 for v in deps[dep]:
239 result.append(dep + " (" + v + ")")
240 else:
241 result.append(dep + " (" + deps[dep] + ")")
242 else:
243 result.append(dep)
244 if commasep:
245 return ", ".join(result)
246 else:
247 return " ".join(result)
248
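# Illustrative only, not part of the original file: explode_dep_versions2()
# parses an RDEPENDS-style string into a dict and join_deps() renders one back
# (plain dict ordering is not guaranteed, so only membership is checked here).
def _dep_versions_example():
    r = explode_dep_versions2("foo (>= 1.0) bar")
    assert r == {"foo": [">= 1.0"], "bar": []}
    s = join_deps(r)
    assert "foo (>= 1.0)" in s and "bar" in s
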
249def _print_trace(body, line):
250 """
251 Print the Environment of a Text Body
252 """
253 error = []
254 # print the environment of the method
255 min_line = max(1, line-4)
256 max_line = min(line + 4, len(body))
257 for i in range(min_line, max_line + 1):
258 if line == i:
259 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
260 else:
261 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
262 return error
263
264def better_compile(text, file, realfile, mode = "exec"):
265 """
266 A better compile method. This method
267 will print the offending lines.
268 """
269 try:
270 return compile(text, file, mode)
271 except Exception as e:
272 error = []
273 # split the text into lines again
274 body = text.split('\n')
275 error.append("Error in compiling python function in %s:\n" % realfile)
276 if e.lineno:
277 error.append("The code lines resulting in this error were:")
278 error.extend(_print_trace(body, e.lineno))
279 else:
280 error.append("The function causing this error was:")
281 for line in body:
282 error.append(line)
283 error.append("%s: %s" % (e.__class__.__name__, str(e)))
284
285 logger.error("\n".join(error))
286
287 e = bb.BBHandledException(e)
288 raise e
289
290def _print_exception(t, value, tb, realfile, text, context):
291 error = []
292 try:
293 exception = traceback.format_exception_only(t, value)
294 error.append('Error executing a python function in %s:\n' % realfile)
295
296 # Strip 'us' from the stack (better_exec call)
297 tb = tb.tb_next
298
299 textarray = text.split('\n')
300
301 linefailed = tb.tb_lineno
302
303 tbextract = traceback.extract_tb(tb)
304 tbformat = traceback.format_list(tbextract)
305 error.append("The stack trace of python calls that resulted in this exception/failure was:")
306 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
307 error.extend(_print_trace(textarray, linefailed))
308
309 # See if this is a function we constructed and has calls back into other functions in
310 # "text". If so, try and improve the context of the error by diving down the trace
311 level = 0
312 nexttb = tb.tb_next
313 while nexttb is not None and (level+1) < len(tbextract):
314 error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
315 if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
316 # The code was possibly in the string we compiled ourselves
317 error.extend(_print_trace(textarray, tbextract[level+1][1]))
318 elif tbextract[level+1][0].startswith("/"):
319 # The code looks like it might be in a file, try and load it
320 try:
321 with open(tbextract[level+1][0], "r") as f:
322 text = f.readlines()
323 error.extend(_print_trace(text, tbextract[level+1][1]))
324 except:
325 error.append(tbformat[level+1])
326 elif "d" in context and tbextract[level+1][2]:
327 # Try and find the code in the datastore based on the functionname
328 d = context["d"]
329 functionname = tbextract[level+1][2]
330 text = d.getVar(functionname, True)
331 if text:
332 error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
333 else:
334 error.append(tbformat[level+1])
335 else:
336 error.append(tbformat[level+1])
337 nexttb = tb.tb_next
338 level = level + 1
339
340 error.append("Exception: %s" % ''.join(exception))
341 finally:
342 logger.error("\n".join(error))
343
344def better_exec(code, context, text = None, realfile = "<code>"):
345 """
346    Similar to better_compile, better_exec will
347 print the lines that are responsible for the
348 error.
349 """
350 import bb.parse
351 if not text:
352 text = code
353 if not hasattr(code, "co_filename"):
354 code = better_compile(code, realfile, realfile)
355 try:
356 exec(code, get_context(), context)
357 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
358 # Error already shown so passthrough, no need for traceback
359 raise
360 except Exception as e:
361 (t, value, tb) = sys.exc_info()
362 try:
363 _print_exception(t, value, tb, realfile, text, context)
364 except Exception as e:
365 logger.error("Exception handler error: %s" % str(e))
366
367 e = bb.BBHandledException(e)
368 raise e
369
370def simple_exec(code, context):
371 exec(code, get_context(), context)
372
373def better_eval(source, locals):
374 return eval(source, get_context(), locals)
375
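# Illustrative only, not part of the original file: the exec/eval wrappers run
# code against the shared context from get_context(), which already exposes
# os, bb and time, while assignments land in the dictionary passed by the
# caller.
def _exec_helpers_example():
    ns = {}
    simple_exec("joined = os.path.join('tmp', 'work')", ns)
    assert ns["joined"] == os.path.join("tmp", "work")
    assert better_eval("time.time() > 0", {}) is True
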
376@contextmanager
377def fileslocked(files):
378 """Context manager for locking and unlocking file locks."""
379 locks = []
380 if files:
381 for lockfile in files:
382 locks.append(bb.utils.lockfile(lockfile))
383
384 yield
385
386 for lock in locks:
387 bb.utils.unlockfile(lock)
388
389def lockfile(name, shared=False, retry=True):
390 """
391    Use the file 'name' as a lock file; return when the lock has been acquired.
392 Returns a variable to pass to unlockfile().
393 """
394 dirname = os.path.dirname(name)
395 mkdirhier(dirname)
396
397 if not os.access(dirname, os.W_OK):
398 logger.error("Unable to acquire lock '%s', directory is not writable",
399 name)
400 sys.exit(1)
401
402 op = fcntl.LOCK_EX
403 if shared:
404 op = fcntl.LOCK_SH
405 if not retry:
406 op = op | fcntl.LOCK_NB
407
408 while True:
409 # If we leave the lockfiles lying around there is no problem
410 # but we should clean up after ourselves. This gives potential
411 # for races though. To work around this, when we acquire the lock
412        # we check that the file we locked is still the lock file on disk
413 # by comparing inode numbers. If they don't match or the lockfile
414 # no longer exists, we start again.
415
416 # This implementation is unfair since the last person to request the
417 # lock is the most likely to win it.
418
419 try:
420 lf = open(name, 'a+')
421 fileno = lf.fileno()
422 fcntl.flock(fileno, op)
423 statinfo = os.fstat(fileno)
424 if os.path.exists(lf.name):
425 statinfo2 = os.stat(lf.name)
426 if statinfo.st_ino == statinfo2.st_ino:
427 return lf
428 lf.close()
429 except Exception:
430 try:
431 lf.close()
432 except Exception:
433 pass
434 pass
435 if not retry:
436 return None
437
438def unlockfile(lf):
439 """
440 Unlock a file locked using lockfile()
441 """
442 try:
443 # If we had a shared lock, we need to promote to exclusive before
444 # removing the lockfile. Attempt this, ignore failures.
445 fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
446 os.unlink(lf.name)
447 except (IOError, OSError):
448 pass
449 fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
450 lf.close()
451
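# Illustrative only, not part of the original file: typical use of the lock
# helpers above. The lock file path is just an example.
def _lockfile_example(tmpdir):
    lockname = os.path.join(tmpdir, "bitbake.lock")
    lf = lockfile(lockname)         # blocks until the exclusive lock is held
    try:
        pass                        # ... work that needs the lock ...
    finally:
        unlockfile(lf)              # releases and removes the lock file
    # fileslocked() wraps the same acquire/release pattern:
    with fileslocked([lockname]):
        pass                        # ... critical section ...
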
452def md5_file(filename):
453 """
454 Return the hex string representation of the MD5 checksum of filename.
455 """
456 try:
457 import hashlib
458 m = hashlib.md5()
459 except ImportError:
460 import md5
461 m = md5.new()
462
463 with open(filename, "rb") as f:
464 for line in f:
465 m.update(line)
466 return m.hexdigest()
467
468def sha256_file(filename):
469 """
470 Return the hex string representation of the 256-bit SHA checksum of
471 filename. On Python 2.4 this will return None, so callers will need to
472 handle that by either skipping SHA checks, or running a standalone sha256sum
473 binary.
474 """
475 try:
476 import hashlib
477 except ImportError:
478 return None
479
480 s = hashlib.sha256()
481 with open(filename, "rb") as f:
482 for line in f:
483 s.update(line)
484 return s.hexdigest()
485
486def preserved_envvars_exported():
487 """Variables which are taken from the environment and placed in and exported
488 from the metadata"""
489 return [
490 'BB_TASKHASH',
491 'HOME',
492 'LOGNAME',
493 'PATH',
494 'PWD',
495 'SHELL',
496 'TERM',
497 'USER',
498 ]
499
500def preserved_envvars():
501 """Variables which are taken from the environment and placed in the metadata"""
502 v = [
503 'BBPATH',
504 'BB_PRESERVE_ENV',
505 'BB_ENV_WHITELIST',
506 'BB_ENV_EXTRAWHITE',
507 ]
508 return v + preserved_envvars_exported()
509
510def filter_environment(good_vars):
511 """
512 Create a pristine environment for bitbake. This will remove variables that
513 are not known and may influence the build in a negative way.
514 """
515
516 removed_vars = {}
517 for key in os.environ.keys():
518 if key in good_vars:
519 continue
520
521 removed_vars[key] = os.environ[key]
522 os.unsetenv(key)
523 del os.environ[key]
524
525 if len(removed_vars):
526 logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
527
528 return removed_vars
529
530def approved_variables():
531 """
532 Determine and return the list of whitelisted variables which are approved
533 to remain in the environment.
534 """
535 if 'BB_PRESERVE_ENV' in os.environ:
536 return os.environ.keys()
537 approved = []
538 if 'BB_ENV_WHITELIST' in os.environ:
539 approved = os.environ['BB_ENV_WHITELIST'].split()
540 approved.extend(['BB_ENV_WHITELIST'])
541 else:
542 approved = preserved_envvars()
543 if 'BB_ENV_EXTRAWHITE' in os.environ:
544 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
545 if 'BB_ENV_EXTRAWHITE' not in approved:
546 approved.extend(['BB_ENV_EXTRAWHITE'])
547 return approved
548
549def clean_environment():
550 """
551 Clean up any spurious environment variables. This will remove any
552 variables the user hasn't chosen to preserve.
553 """
554 if 'BB_PRESERVE_ENV' not in os.environ:
555 good_vars = approved_variables()
556 return filter_environment(good_vars)
557
558 return {}
559
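# Illustrative only, not part of the original file: clean_environment()
# filters os.environ down to the approved variables and returns what it
# removed, so a caller can restore the stripped values afterwards.
def _clean_environment_example():
    removed = clean_environment()
    # ... spawn child processes with the pristine environment ...
    os.environ.update(removed)      # restore anything that was stripped
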
560def empty_environment():
561 """
562 Remove all variables from the environment.
563 """
564 for s in os.environ.keys():
565 os.unsetenv(s)
566 del os.environ[s]
567
568def build_environment(d):
569 """
570 Build an environment from all exported variables.
571 """
572 import bb.data
573 for var in bb.data.keys(d):
574 export = d.getVarFlag(var, "export")
575 if export:
576 os.environ[var] = d.getVar(var, True) or ""
577
578def _check_unsafe_delete_path(path):
579 """
580 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
581 the caller should raise an exception with an appropriate message.
582 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
583 with potentially disastrous results.
584 """
585 extra = ''
586 # HOME might not be /home/something, so in case we can get it, check against it
587 homedir = os.environ.get('HOME', '')
588 if homedir:
589 extra = '|%s' % homedir
590 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
591 return True
592 return False
593
594def remove(path, recurse=False):
595 """Equivalent to rm -f or rm -rf"""
596 if not path:
597 return
598 if recurse:
599 for name in glob.glob(path):
600 if _check_unsafe_delete_path(path):
601 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
602        # shutil.rmtree(name) would be ideal but it's too slow
603 subprocess.call(['rm', '-rf'] + glob.glob(path))
604 return
605 for name in glob.glob(path):
606 try:
607 os.unlink(name)
608 except OSError as exc:
609 if exc.errno != errno.ENOENT:
610 raise
611
612def prunedir(topdir):
613 # Delete everything reachable from the directory named in 'topdir'.
614 # CAUTION: This is dangerous!
615 if _check_unsafe_delete_path(topdir):
616 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
617 for root, dirs, files in os.walk(topdir, topdown = False):
618 for name in files:
619 os.remove(os.path.join(root, name))
620 for name in dirs:
621 if os.path.islink(os.path.join(root, name)):
622 os.remove(os.path.join(root, name))
623 else:
624 os.rmdir(os.path.join(root, name))
625 os.rmdir(topdir)
626
627#
628# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
629# but that's possibly insane and suffixes is probably going to be small
630#
631def prune_suffix(var, suffixes, d):
632 # See if var ends with any of the suffixes listed and
633 # remove it if found
634 for suffix in suffixes:
635 if var.endswith(suffix):
636 return var.replace(suffix, "")
637 return var
638
639def mkdirhier(directory):
640    """Create a directory like 'mkdir -p', but do not complain if the
641    directory already exists (unlike os.makedirs)
642 """
643
644 try:
645 os.makedirs(directory)
646 except OSError as e:
647 if e.errno != errno.EEXIST:
648 raise e
649
650def movefile(src, dest, newmtime = None, sstat = None):
651 """Moves a file from src to dest, preserving all permissions and
652 attributes; mtime will be preserved even when moving across
653 filesystems. Returns true on success and false on failure. Move is
654 atomic.
655 """
656
657 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
658 try:
659 if not sstat:
660 sstat = os.lstat(src)
661 except Exception as e:
662 print("movefile: Stating source file failed...", e)
663 return None
664
665 destexists = 1
666 try:
667 dstat = os.lstat(dest)
668 except:
669 dstat = os.lstat(os.path.dirname(dest))
670 destexists = 0
671
672 if destexists:
673 if stat.S_ISLNK(dstat[stat.ST_MODE]):
674 try:
675 os.unlink(dest)
676 destexists = 0
677 except Exception as e:
678 pass
679
680 if stat.S_ISLNK(sstat[stat.ST_MODE]):
681 try:
682 target = os.readlink(src)
683 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
684 os.unlink(dest)
685 os.symlink(target, dest)
686 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
687 os.unlink(src)
688 return os.lstat(dest)
689 except Exception as e:
690 print("movefile: failed to properly create symlink:", dest, "->", target, e)
691 return None
692
693 renamefailed = 1
694 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
695 try:
696 os.rename(src, dest)
697 renamefailed = 0
698 except Exception as e:
699 if e[0] != errno.EXDEV:
700 # Some random error.
701 print("movefile: Failed to move", src, "to", dest, e)
702 return None
703            # Invalid cross-device link ('bind' mount or genuinely cross-device), so fall back to copying
704
705 if renamefailed:
706 didcopy = 0
707 if stat.S_ISREG(sstat[stat.ST_MODE]):
708 try: # For safety copy then move it over.
709 shutil.copyfile(src, dest + "#new")
710 os.rename(dest + "#new", dest)
711 didcopy = 1
712 except Exception as e:
713 print('movefile: copy', src, '->', dest, 'failed.', e)
714 return None
715 else:
716            # we don't yet handle special files, so we need to fall back to /bin/mv
717 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
718 if a[0] != 0:
719                print("movefile: Failed to move special file: '" + src + "' to '" + dest + "'", a)
720 return None # failure
721 try:
722 if didcopy:
723 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
724 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
725 os.unlink(src)
726 except Exception as e:
727 print("movefile: Failed to chown/chmod/unlink", dest, e)
728 return None
729
730 if newmtime:
731 os.utime(dest, (newmtime, newmtime))
732 else:
733 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
734 newmtime = sstat[stat.ST_MTIME]
735 return newmtime
736
737def copyfile(src, dest, newmtime = None, sstat = None):
738 """
739 Copies a file from src to dest, preserving all permissions and
740 attributes; mtime will be preserved even when moving across
741 filesystems. Returns true on success and false on failure.
742 """
743 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
744 try:
745 if not sstat:
746 sstat = os.lstat(src)
747 except Exception as e:
748 logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
749 return False
750
751 destexists = 1
752 try:
753 dstat = os.lstat(dest)
754 except:
755 dstat = os.lstat(os.path.dirname(dest))
756 destexists = 0
757
758 if destexists:
759 if stat.S_ISLNK(dstat[stat.ST_MODE]):
760 try:
761 os.unlink(dest)
762 destexists = 0
763 except Exception as e:
764 pass
765
766 if stat.S_ISLNK(sstat[stat.ST_MODE]):
767 try:
768 target = os.readlink(src)
769 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
770 os.unlink(dest)
771 os.symlink(target, dest)
772 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
773 return os.lstat(dest)
774 except Exception as e:
775 logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
776 return False
777
778 if stat.S_ISREG(sstat[stat.ST_MODE]):
779 try:
780 srcchown = False
781 if not os.access(src, os.R_OK):
782 # Make sure we can read it
783 srcchown = True
784 os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
785
786 # For safety copy then move it over.
787 shutil.copyfile(src, dest + "#new")
788 os.rename(dest + "#new", dest)
789 except Exception as e:
790 logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
791 return False
792 finally:
793 if srcchown:
794 os.chmod(src, sstat[stat.ST_MODE])
795 os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
796
797 else:
798        # we don't yet handle special files, so we need to fall back to /bin/cp
799 a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
800 if a[0] != 0:
801 logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
802 return False # failure
803 try:
804 os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
805 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
806 except Exception as e:
807 logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
808 return False
809
810 if newmtime:
811 os.utime(dest, (newmtime, newmtime))
812 else:
813 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
814 newmtime = sstat[stat.ST_MTIME]
815 return newmtime
816
817def which(path, item, direction = 0, history = False):
818 """
819 Locate a file in a PATH
820 """
821
822 hist = []
823 paths = (path or "").split(':')
824 if direction != 0:
825 paths.reverse()
826
827 for p in paths:
828 next = os.path.join(p, item)
829 hist.append(next)
830 if os.path.exists(next):
831 if not os.path.isabs(next):
832 next = os.path.abspath(next)
833 if history:
834 return next, hist
835 return next
836
837 if history:
838 return "", hist
839 return ""
840
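# Illustrative only, not part of the original file: which() searches a
# colon-separated path list and can also report every candidate it tried.
def _which_example():
    found = which("/bin:/usr/bin", "sh")   # e.g. "/bin/sh", or "" if absent
    found, tried = which("/bin:/usr/bin", "sh", history=True)
    assert isinstance(tried, list)         # the candidates that were checked
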
841def to_boolean(string, default=None):
842 if not string:
843 return default
844
845 normalized = string.lower()
846 if normalized in ("y", "yes", "1", "true"):
847 return True
848 elif normalized in ("n", "no", "0", "false"):
849 return False
850 else:
851 raise ValueError("Invalid value for to_boolean: %s" % string)
852
853def contains(variable, checkvalues, truevalue, falsevalue, d):
854 val = d.getVar(variable, True)
855 if not val:
856 return falsevalue
857 val = set(val.split())
858 if isinstance(checkvalues, basestring):
859 checkvalues = set(checkvalues.split())
860 else:
861 checkvalues = set(checkvalues)
862 if checkvalues.issubset(val):
863 return truevalue
864 return falsevalue
865
866def contains_any(variable, checkvalues, truevalue, falsevalue, d):
867 val = d.getVar(variable, True)
868 if not val:
869 return falsevalue
870 val = set(val.split())
871 if isinstance(checkvalues, basestring):
872 checkvalues = set(checkvalues.split())
873 else:
874 checkvalues = set(checkvalues)
875 if checkvalues & val:
876 return truevalue
877 return falsevalue
878
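# Illustrative only, not part of the original file: contains()/contains_any()
# test whitespace-separated values of a datastore variable; 'd' is assumed to
# be a bb.data_smart.DataSmart instance.
def _contains_example(d):
    d.setVar("DISTRO_FEATURES", "x11 wayland systemd")
    assert contains("DISTRO_FEATURES", "x11 systemd", "yes", "no", d) == "yes"
    assert contains_any("DISTRO_FEATURES", "directfb x11", "yes", "no", d) == "yes"
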
879def cpu_count():
880 return multiprocessing.cpu_count()
881
882def nonblockingfd(fd):
883 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
884
885def process_profilelog(fn):
886 pout = open(fn + '.processed', 'w')
887
888 import pstats
889 p = pstats.Stats(fn, stream=pout)
890 p.sort_stats('time')
891 p.print_stats()
892 p.print_callers()
893 p.sort_stats('cumulative')
894 p.print_stats()
895
896 pout.flush()
897 pout.close()
898
899#
900# Was present to work around multiprocessing pool bugs in python < 2.7.3
901#
902def multiprocessingpool(*args, **kwargs):
903
904 import multiprocessing.pool
905 #import multiprocessing.util
906 #multiprocessing.util.log_to_stderr(10)
907 # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
908 # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
909 def wrapper(func):
910 def wrap(self, timeout=None):
911 return func(self, timeout=timeout if timeout is not None else 1e100)
912 return wrap
913 multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
914
915 return multiprocessing.Pool(*args, **kwargs)
916