summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--bitbake/lib/bb/COW.py171
-rw-r--r--bitbake/lib/bb/__init__.py92
-rw-r--r--bitbake/lib/bb/build.py64
-rw-r--r--bitbake/lib/bb/cache.py99
-rw-r--r--bitbake/lib/bb/command.py19
-rw-r--r--bitbake/lib/bb/cooker.py182
-rw-r--r--bitbake/lib/bb/daemonize.py381
-rw-r--r--bitbake/lib/bb/data.py417
-rw-r--r--bitbake/lib/bb/data_smart.py158
-rw-r--r--bitbake/lib/bb/event.py33
-rw-r--r--bitbake/lib/bb/fetch/__init__.py134
-rw-r--r--bitbake/lib/bb/fetch/bzr.py7
-rw-r--r--bitbake/lib/bb/fetch/cvs.py6
-rw-r--r--bitbake/lib/bb/fetch/git.py13
-rw-r--r--bitbake/lib/bb/fetch/hg.py7
-rw-r--r--bitbake/lib/bb/fetch/local.py9
-rw-r--r--bitbake/lib/bb/fetch/osc.py10
-rw-r--r--bitbake/lib/bb/fetch/perforce.py41
-rw-r--r--bitbake/lib/bb/fetch/repo.py3
-rw-r--r--bitbake/lib/bb/fetch/ssh.py2
-rw-r--r--bitbake/lib/bb/fetch/svn.py2
-rw-r--r--bitbake/lib/bb/fetch/wget.py35
-rw-r--r--bitbake/lib/bb/methodpool.py10
-rw-r--r--bitbake/lib/bb/msg.py108
-rw-r--r--bitbake/lib/bb/parse/__init__.py19
-rw-r--r--bitbake/lib/bb/parse/ast.py60
-rw-r--r--bitbake/lib/bb/parse/parse_py/BBHandler.py33
-rw-r--r--bitbake/lib/bb/parse/parse_py/ConfHandler.py14
-rw-r--r--bitbake/lib/bb/parse/parse_py/__init__.py8
-rw-r--r--bitbake/lib/bb/persist_data.py41
-rw-r--r--bitbake/lib/bb/providers.py16
-rw-r--r--bitbake/lib/bb/runqueue.py124
-rw-r--r--bitbake/lib/bb/server/none.py3
-rw-r--r--bitbake/lib/bb/server/xmlrpc.py15
-rw-r--r--bitbake/lib/bb/shell.py166
-rw-r--r--bitbake/lib/bb/taskdata.py65
-rw-r--r--bitbake/lib/bb/ui/__init__.py1
-rw-r--r--bitbake/lib/bb/ui/crumbs/__init__.py1
-rw-r--r--bitbake/lib/bb/ui/crumbs/buildmanager.py90
-rw-r--r--bitbake/lib/bb/ui/crumbs/runningbuild.py30
-rw-r--r--bitbake/lib/bb/ui/depexp.py21
-rw-r--r--bitbake/lib/bb/ui/goggle.py21
-rw-r--r--bitbake/lib/bb/ui/knotty.py92
-rw-r--r--bitbake/lib/bb/ui/ncurses.py31
-rw-r--r--bitbake/lib/bb/ui/puccho.py130
-rw-r--r--bitbake/lib/bb/ui/uievent.py7
-rw-r--r--bitbake/lib/bb/utils.py396
47 files changed, 1599 insertions, 1788 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index ca206cf4b4..6917ec378a 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -3,7 +3,7 @@
3# 3#
4# This is a copy on write dictionary and set which abuses classes to try and be nice and fast. 4# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
5# 5#
6# Copyright (C) 2006 Tim Amsell 6# Copyright (C) 2006 Tim Amsell
7# 7#
8# This program is free software; you can redistribute it and/or modify 8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as 9# it under the terms of the GNU General Public License version 2 as
@@ -18,29 +18,31 @@
18# with this program; if not, write to the Free Software Foundation, Inc., 18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20# 20#
21#Please Note: 21#Please Note:
22# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW. 22# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
23# Assign a file to __warn__ to get warnings about slow operations. 23# Assign a file to __warn__ to get warnings about slow operations.
24# 24#
25 25
26from __future__ import print_function
26import copy 27import copy
27import types 28import types
28types.ImmutableTypes = tuple([ \ 29ImmutableTypes = (
29 types.BooleanType, \ 30 types.NoneType,
30 types.ComplexType, \ 31 bool,
31 types.FloatType, \ 32 complex,
32 types.IntType, \ 33 float,
33 types.LongType, \ 34 int,
34 types.NoneType, \ 35 long,
35 types.TupleType, \ 36 tuple,
36 frozenset] + \ 37 frozenset,
37 list(types.StringTypes)) 38 basestring
39)
38 40
39MUTABLE = "__mutable__" 41MUTABLE = "__mutable__"
40 42
41class COWMeta(type): 43class COWMeta(type):
42 pass 44 pass
43 45
44class COWDictMeta(COWMeta): 46class COWDictMeta(COWMeta):
45 __warn__ = False 47 __warn__ = False
46 __hasmutable__ = False 48 __hasmutable__ = False
@@ -59,12 +61,12 @@ class COWDictMeta(COWMeta):
59 __call__ = cow 61 __call__ = cow
60 62
61 def __setitem__(cls, key, value): 63 def __setitem__(cls, key, value):
62 if not isinstance(value, types.ImmutableTypes): 64 if not isinstance(value, ImmutableTypes):
63 if not isinstance(value, COWMeta): 65 if not isinstance(value, COWMeta):
64 cls.__hasmutable__ = True 66 cls.__hasmutable__ = True
65 key += MUTABLE 67 key += MUTABLE
66 setattr(cls, key, value) 68 setattr(cls, key, value)
67 69
68 def __getmutable__(cls, key, readonly=False): 70 def __getmutable__(cls, key, readonly=False):
69 nkey = key + MUTABLE 71 nkey = key + MUTABLE
70 try: 72 try:
@@ -77,10 +79,10 @@ class COWDictMeta(COWMeta):
77 return value 79 return value
78 80
79 if not cls.__warn__ is False and not isinstance(value, COWMeta): 81 if not cls.__warn__ is False and not isinstance(value, COWMeta):
80 print >> cls.__warn__, "Warning: Doing a copy because %s is a mutable type." % key 82 print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
81 try: 83 try:
82 value = value.copy() 84 value = value.copy()
83 except AttributeError, e: 85 except AttributeError as e:
84 value = copy.copy(value) 86 value = copy.copy(value)
85 setattr(cls, nkey, value) 87 setattr(cls, nkey, value)
86 return value 88 return value
@@ -98,13 +100,13 @@ class COWDictMeta(COWMeta):
98 value = getattr(cls, key) 100 value = getattr(cls, key)
99 except AttributeError: 101 except AttributeError:
100 value = cls.__getmutable__(key, readonly) 102 value = cls.__getmutable__(key, readonly)
101 103
102 # This is for values which have been deleted 104 # This is for values which have been deleted
103 if value is cls.__marker__: 105 if value is cls.__marker__:
104 raise AttributeError("key %s does not exist." % key) 106 raise AttributeError("key %s does not exist." % key)
105 107
106 return value 108 return value
107 except AttributeError, e: 109 except AttributeError as e:
108 if not default is cls.__getmarker__: 110 if not default is cls.__getmarker__:
109 return default 111 return default
110 112
@@ -118,6 +120,9 @@ class COWDictMeta(COWMeta):
118 key += MUTABLE 120 key += MUTABLE
119 delattr(cls, key) 121 delattr(cls, key)
120 122
123 def __contains__(cls, key):
124 return cls.has_key(key)
125
121 def has_key(cls, key): 126 def has_key(cls, key):
122 value = cls.__getreadonly__(key, cls.__marker__) 127 value = cls.__getreadonly__(key, cls.__marker__)
123 if value is cls.__marker__: 128 if value is cls.__marker__:
@@ -127,7 +132,7 @@ class COWDictMeta(COWMeta):
127 def iter(cls, type, readonly=False): 132 def iter(cls, type, readonly=False):
128 for key in dir(cls): 133 for key in dir(cls):
129 if key.startswith("__"): 134 if key.startswith("__"):
130 continue 135 continue
131 136
132 if key.endswith(MUTABLE): 137 if key.endswith(MUTABLE):
133 key = key[:-len(MUTABLE)] 138 key = key[:-len(MUTABLE)]
@@ -153,11 +158,11 @@ class COWDictMeta(COWMeta):
153 return cls.iter("keys") 158 return cls.iter("keys")
154 def itervalues(cls, readonly=False): 159 def itervalues(cls, readonly=False):
155 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: 160 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
156 print >> cls.__warn__, "Warning: If you arn't going to change any of the values call with True." 161 print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
157 return cls.iter("values", readonly) 162 return cls.iter("values", readonly)
158 def iteritems(cls, readonly=False): 163 def iteritems(cls, readonly=False):
159 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False: 164 if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
160 print >> cls.__warn__, "Warning: If you arn't going to change any of the values call with True." 165 print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
161 return cls.iter("items", readonly) 166 return cls.iter("items", readonly)
162 167
163class COWSetMeta(COWDictMeta): 168class COWSetMeta(COWDictMeta):
@@ -176,13 +181,13 @@ class COWSetMeta(COWDictMeta):
176 181
177 def remove(cls, value): 182 def remove(cls, value):
178 COWDictMeta.__delitem__(cls, repr(hash(value))) 183 COWDictMeta.__delitem__(cls, repr(hash(value)))
179 184
180 def __in__(cls, value): 185 def __in__(cls, value):
181 return COWDictMeta.has_key(repr(hash(value))) 186 return COWDictMeta.has_key(repr(hash(value)))
182 187
183 def iterkeys(cls): 188 def iterkeys(cls):
184 raise TypeError("sets don't have keys") 189 raise TypeError("sets don't have keys")
185 190
186 def iteritems(cls): 191 def iteritems(cls):
187 raise TypeError("sets don't have 'items'") 192 raise TypeError("sets don't have 'items'")
188 193
@@ -199,120 +204,120 @@ if __name__ == "__main__":
199 import sys 204 import sys
200 COWDictBase.__warn__ = sys.stderr 205 COWDictBase.__warn__ = sys.stderr
201 a = COWDictBase() 206 a = COWDictBase()
202 print "a", a 207 print("a", a)
203 208
204 a['a'] = 'a' 209 a['a'] = 'a'
205 a['b'] = 'b' 210 a['b'] = 'b'
206 a['dict'] = {} 211 a['dict'] = {}
207 212
208 b = a.copy() 213 b = a.copy()
209 print "b", b 214 print("b", b)
210 b['c'] = 'b' 215 b['c'] = 'b'
211 216
212 print 217 print()
213 218
214 print "a", a 219 print("a", a)
215 for x in a.iteritems(): 220 for x in a.iteritems():
216 print x 221 print(x)
217 print "--" 222 print("--")
218 print "b", b 223 print("b", b)
219 for x in b.iteritems(): 224 for x in b.iteritems():
220 print x 225 print(x)
221 print 226 print()
222 227
223 b['dict']['a'] = 'b' 228 b['dict']['a'] = 'b'
224 b['a'] = 'c' 229 b['a'] = 'c'
225 230
226 print "a", a 231 print("a", a)
227 for x in a.iteritems(): 232 for x in a.iteritems():
228 print x 233 print(x)
229 print "--" 234 print("--")
230 print "b", b 235 print("b", b)
231 for x in b.iteritems(): 236 for x in b.iteritems():
232 print x 237 print(x)
233 print 238 print()
234 239
235 try: 240 try:
236 b['dict2'] 241 b['dict2']
237 except KeyError, e: 242 except KeyError as e:
238 print "Okay!" 243 print("Okay!")
239 244
240 a['set'] = COWSetBase() 245 a['set'] = COWSetBase()
241 a['set'].add("o1") 246 a['set'].add("o1")
242 a['set'].add("o1") 247 a['set'].add("o1")
243 a['set'].add("o2") 248 a['set'].add("o2")
244 249
245 print "a", a 250 print("a", a)
246 for x in a['set'].itervalues(): 251 for x in a['set'].itervalues():
247 print x 252 print(x)
248 print "--" 253 print("--")
249 print "b", b 254 print("b", b)
250 for x in b['set'].itervalues(): 255 for x in b['set'].itervalues():
251 print x 256 print(x)
252 print 257 print()
253 258
254 b['set'].add('o3') 259 b['set'].add('o3')
255 260
256 print "a", a 261 print("a", a)
257 for x in a['set'].itervalues(): 262 for x in a['set'].itervalues():
258 print x 263 print(x)
259 print "--" 264 print("--")
260 print "b", b 265 print("b", b)
261 for x in b['set'].itervalues(): 266 for x in b['set'].itervalues():
262 print x 267 print(x)
263 print 268 print()
264 269
265 a['set2'] = set() 270 a['set2'] = set()
266 a['set2'].add("o1") 271 a['set2'].add("o1")
267 a['set2'].add("o1") 272 a['set2'].add("o1")
268 a['set2'].add("o2") 273 a['set2'].add("o2")
269 274
270 print "a", a 275 print("a", a)
271 for x in a.iteritems(): 276 for x in a.iteritems():
272 print x 277 print(x)
273 print "--" 278 print("--")
274 print "b", b 279 print("b", b)
275 for x in b.iteritems(readonly=True): 280 for x in b.iteritems(readonly=True):
276 print x 281 print(x)
277 print 282 print()
278 283
279 del b['b'] 284 del b['b']
280 try: 285 try:
281 print b['b'] 286 print(b['b'])
282 except KeyError: 287 except KeyError:
283 print "Yay! deleted key raises error" 288 print("Yay! deleted key raises error")
284 289
285 if b.has_key('b'): 290 if b.has_key('b'):
286 print "Boo!" 291 print("Boo!")
287 else: 292 else:
288 print "Yay - has_key with delete works!" 293 print("Yay - has_key with delete works!")
289 294
290 print "a", a 295 print("a", a)
291 for x in a.iteritems(): 296 for x in a.iteritems():
292 print x 297 print(x)
293 print "--" 298 print("--")
294 print "b", b 299 print("b", b)
295 for x in b.iteritems(readonly=True): 300 for x in b.iteritems(readonly=True):
296 print x 301 print(x)
297 print 302 print()
298 303
299 b.__revertitem__('b') 304 b.__revertitem__('b')
300 305
301 print "a", a 306 print("a", a)
302 for x in a.iteritems(): 307 for x in a.iteritems():
303 print x 308 print(x)
304 print "--" 309 print("--")
305 print "b", b 310 print("b", b)
306 for x in b.iteritems(readonly=True): 311 for x in b.iteritems(readonly=True):
307 print x 312 print(x)
308 print 313 print()
309 314
310 b.__revertitem__('dict') 315 b.__revertitem__('dict')
311 print "a", a 316 print("a", a)
312 for x in a.iteritems(): 317 for x in a.iteritems():
313 print x 318 print(x)
314 print "--" 319 print("--")
315 print "b", b 320 print("b", b)
316 for x in b.iteritems(readonly=True): 321 for x in b.iteritems(readonly=True):
317 print x 322 print(x)
318 print 323 print()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index c7cd0f62dd..88adfc1dfa 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,39 +21,14 @@
21# with this program; if not, write to the Free Software Foundation, Inc., 21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24__version__ = "1.9.0" 24__version__ = "1.11.0"
25 25
26__all__ = [ 26import sys
27 27if sys.version_info < (2, 6, 0):
28 "debug", 28 raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
29 "note", 29
30 "error", 30import os
31 "fatal", 31import bb.msg
32
33 "mkdirhier",
34 "movefile",
35 "vercmp",
36
37# fetch
38 "decodeurl",
39 "encodeurl",
40
41# modules
42 "parse",
43 "data",
44 "command",
45 "event",
46 "build",
47 "fetch",
48 "manifest",
49 "methodpool",
50 "cache",
51 "runqueue",
52 "taskdata",
53 "providers",
54 ]
55
56import sys, os, types, re, string
57 32
58if "BBDEBUG" in os.environ: 33if "BBDEBUG" in os.environ:
59 level = int(os.environ["BBDEBUG"]) 34 level = int(os.environ["BBDEBUG"])
@@ -81,14 +56,45 @@ def fatal(*args):
81 bb.msg.fatal(None, ''.join(args)) 56 bb.msg.fatal(None, ''.join(args))
82 57
83 58
84# For compatibility 59def deprecated(func, name = None, advice = ""):
85from bb.fetch import MalformedUrl, encodeurl, decodeurl 60 """This is a decorator which can be used to mark functions
86from bb.data import VarExpandError 61 as deprecated. It will result in a warning being emmitted
87from bb.utils import mkdirhier, movefile, copyfile, which 62 when the function is used."""
88from bb.utils import vercmp 63 import warnings
64
65 if advice:
66 advice = ": %s" % advice
67 if name is None:
68 name = func.__name__
69
70 def newFunc(*args, **kwargs):
71 warnings.warn("Call to deprecated function %s%s." % (name,
72 advice),
73 category = PendingDeprecationWarning,
74 stacklevel = 2)
75 return func(*args, **kwargs)
76 newFunc.__name__ = func.__name__
77 newFunc.__doc__ = func.__doc__
78 newFunc.__dict__.update(func.__dict__)
79 return newFunc
89 80
90 81# For compatibility
91if __name__ == "__main__": 82def deprecate_import(current, modulename, fromlist, renames = None):
92 import doctest, bb 83 """Import objects from one module into another, wrapping them with a DeprecationWarning"""
93 bb.msg.set_debug_level(0) 84 import sys
94 doctest.testmod(bb) 85
86 module = __import__(modulename, fromlist = fromlist)
87 for position, objname in enumerate(fromlist):
88 obj = getattr(module, objname)
89 newobj = deprecated(obj, "{0}.{1}".format(current, objname),
90 "Please use {0}.{1} instead".format(modulename, objname))
91 if renames:
92 newname = renames[position]
93 else:
94 newname = objname
95
96 setattr(sys.modules[current], newname, newobj)
97
98deprecate_import(__name__, "bb.fetch", ("MalformedUrl", "encodeurl", "decodeurl"))
99deprecate_import(__name__, "bb.utils", ("mkdirhier", "movefile", "copyfile", "which"))
100deprecate_import(__name__, "bb.utils", ["vercmp_string"], ["vercmp"])
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 43dbfc1363..1f4107fb65 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -27,8 +27,9 @@
27 27
28from bb import data, event, mkdirhier, utils 28from bb import data, event, mkdirhier, utils
29import bb, os, sys 29import bb, os, sys
30import bb.utils
30 31
31# When we execute a python function we'd like certain things 32# When we execute a python function we'd like certain things
32# in all namespaces, hence we add them to __builtins__ 33# in all namespaces, hence we add them to __builtins__
33# If we do not do this and use the exec globals, they will 34# If we do not do this and use the exec globals, they will
34# not be available to subfunctions. 35# not be available to subfunctions.
@@ -98,18 +99,19 @@ def exec_func(func, d, dirs = None):
98 99
99 ispython = flags['python'] 100 ispython = flags['python']
100 101
101 cleandirs = (data.expand(flags['cleandirs'], d) or "").split() 102 cleandirs = flags['cleandirs']
102 for cdir in cleandirs: 103 if cleandirs:
103 os.system("rm -rf %s" % cdir) 104 for cdir in data.expand(cleandirs, d).split():
105 os.system("rm -rf %s" % cdir)
104 106
105 if dirs: 107 if dirs is None:
106 dirs = data.expand(dirs, d) 108 dirs = flags['dirs']
107 else: 109 if dirs:
108 dirs = (data.expand(flags['dirs'], d) or "").split() 110 dirs = data.expand(dirs, d).split()
109 for adir in dirs:
110 mkdirhier(adir)
111 111
112 if len(dirs) > 0: 112 if dirs:
113 for adir in dirs:
114 bb.utils.mkdirhier(adir)
113 adir = dirs[-1] 115 adir = dirs[-1]
114 else: 116 else:
115 adir = data.getVar('B', d, 1) 117 adir = data.getVar('B', d, 1)
@@ -123,8 +125,8 @@ def exec_func(func, d, dirs = None):
123 # Setup logfiles 125 # Setup logfiles
124 t = data.getVar('T', d, 1) 126 t = data.getVar('T', d, 1)
125 if not t: 127 if not t:
126 bb.msg.fatal(bb.msg.domain.Build, "T not set") 128 raise SystemExit("T variable not set, unable to build")
127 mkdirhier(t) 129 bb.utils.mkdirhier(t)
128 logfile = "%s/log.%s.%s" % (t, func, str(os.getpid())) 130 logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
129 runfile = "%s/run.%s.%s" % (t, func, str(os.getpid())) 131 runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
130 132
@@ -139,7 +141,7 @@ def exec_func(func, d, dirs = None):
139 so = os.popen("tee \"%s\"" % logfile, "w") 141 so = os.popen("tee \"%s\"" % logfile, "w")
140 else: 142 else:
141 so = file(logfile, 'w') 143 so = file(logfile, 'w')
142 except OSError, e: 144 except OSError as e:
143 bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e) 145 bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
144 pass 146 pass
145 147
@@ -156,9 +158,10 @@ def exec_func(func, d, dirs = None):
156 os.dup2(se.fileno(), ose[1]) 158 os.dup2(se.fileno(), ose[1])
157 159
158 locks = [] 160 locks = []
159 lockfiles = (data.expand(flags['lockfiles'], d) or "").split() 161 lockfiles = flags['lockfiles']
160 for lock in lockfiles: 162 if lockfiles:
161 locks.append(bb.utils.lockfile(lock)) 163 for lock in data.expand(lockfiles, d).split():
164 locks.append(bb.utils.lockfile(lock))
162 165
163 try: 166 try:
164 # Run the function 167 # Run the function
@@ -200,26 +203,22 @@ def exec_func(func, d, dirs = None):
200 203
201def exec_func_python(func, d, runfile, logfile): 204def exec_func_python(func, d, runfile, logfile):
202 """Execute a python BB 'function'""" 205 """Execute a python BB 'function'"""
203 import re, os
204 206
205 bbfile = bb.data.getVar('FILE', d, 1) 207 bbfile = bb.data.getVar('FILE', d, 1)
206 tmp = "def " + func + "():\n%s" % data.getVar(func, d) 208 tmp = "def " + func + "(d):\n%s" % data.getVar(func, d)
207 tmp += '\n' + func + '()' 209 tmp += '\n' + func + '(d)'
208 210
209 f = open(runfile, "w") 211 f = open(runfile, "w")
210 f.write(tmp) 212 f.write(tmp)
211 comp = utils.better_compile(tmp, func, bbfile) 213 comp = utils.better_compile(tmp, func, bbfile)
212 g = {} # globals
213 g['d'] = d
214 try: 214 try:
215 utils.better_exec(comp, g, tmp, bbfile) 215 utils.better_exec(comp, {"d": d}, tmp, bbfile)
216 except: 216 except:
217 (t,value,tb) = sys.exc_info() 217 (t, value, tb) = sys.exc_info()
218 218
219 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: 219 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
220 raise 220 raise
221 bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func) 221 raise FuncFailed("Function %s failed" % func, logfile)
222 raise FuncFailed("function %s failed" % func, logfile)
223 222
224def exec_func_shell(func, d, runfile, logfile, flags): 223def exec_func_shell(func, d, runfile, logfile, flags):
225 """Execute a shell BB 'function' Returns true if execution was successful. 224 """Execute a shell BB 'function' Returns true if execution was successful.
@@ -248,7 +247,6 @@ def exec_func_shell(func, d, runfile, logfile, flags):
248 f.close() 247 f.close()
249 os.chmod(runfile, 0775) 248 os.chmod(runfile, 0775)
250 if not func: 249 if not func:
251 bb.msg.error(bb.msg.domain.Build, "Function not specified")
252 raise FuncFailed("Function not specified for exec_func_shell") 250 raise FuncFailed("Function not specified for exec_func_shell")
253 251
254 # execute function 252 # execute function
@@ -262,7 +260,6 @@ def exec_func_shell(func, d, runfile, logfile, flags):
262 if ret == 0: 260 if ret == 0:
263 return 261 return
264 262
265 bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
266 raise FuncFailed("function %s failed" % func, logfile) 263 raise FuncFailed("function %s failed" % func, logfile)
267 264
268 265
@@ -287,7 +284,7 @@ def exec_task(task, d):
287 event.fire(TaskStarted(task, localdata), localdata) 284 event.fire(TaskStarted(task, localdata), localdata)
288 exec_func(task, localdata) 285 exec_func(task, localdata)
289 event.fire(TaskSucceeded(task, localdata), localdata) 286 event.fire(TaskSucceeded(task, localdata), localdata)
290 except FuncFailed, message: 287 except FuncFailed as message:
291 # Try to extract the optional logfile 288 # Try to extract the optional logfile
292 try: 289 try:
293 (msg, logfile) = message 290 (msg, logfile) = message
@@ -305,8 +302,8 @@ def exec_task(task, d):
305 302
306def extract_stamp(d, fn): 303def extract_stamp(d, fn):
307 """ 304 """
308 Extracts stamp format which is either a data dictonary (fn unset) 305 Extracts stamp format which is either a data dictonary (fn unset)
309 or a dataCache entry (fn set). 306 or a dataCache entry (fn set).
310 """ 307 """
311 if fn: 308 if fn:
312 return d.stamp[fn] 309 return d.stamp[fn]
@@ -323,7 +320,7 @@ def stamp_internal(task, d, file_name):
323 if not stamp: 320 if not stamp:
324 return 321 return
325 stamp = "%s.%s" % (stamp, task) 322 stamp = "%s.%s" % (stamp, task)
326 mkdirhier(os.path.dirname(stamp)) 323 bb.utils.mkdirhier(os.path.dirname(stamp))
327 # Remove the file and recreate to force timestamp 324 # Remove the file and recreate to force timestamp
328 # change on broken NFS filesystems 325 # change on broken NFS filesystems
329 if os.access(stamp, os.F_OK): 326 if os.access(stamp, os.F_OK):
@@ -363,7 +360,7 @@ def add_tasks(tasklist, d):
363 if not task in task_deps['tasks']: 360 if not task in task_deps['tasks']:
364 task_deps['tasks'].append(task) 361 task_deps['tasks'].append(task)
365 362
366 flags = data.getVarFlags(task, d) 363 flags = data.getVarFlags(task, d)
367 def getTask(name): 364 def getTask(name):
368 if not name in task_deps: 365 if not name in task_deps:
369 task_deps[name] = {} 366 task_deps[name] = {}
@@ -389,4 +386,3 @@ def remove_task(task, kill, d):
389 If kill is 1, also remove tasks that depend on this task.""" 386 If kill is 1, also remove tasks that depend on this task."""
390 387
391 data.delVarFlag(task, 'task', d) 388 data.delVarFlag(task, 'task', d)
392
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 1f180012e0..da4546640a 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -28,7 +28,7 @@
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29 29
30 30
31import os, re 31import os
32import bb.data 32import bb.data
33import bb.utils 33import bb.utils
34 34
@@ -44,10 +44,10 @@ class Cache:
44 """ 44 """
45 BitBake Cache implementation 45 BitBake Cache implementation
46 """ 46 """
47 def __init__(self, cooker): 47 def __init__(self, data):
48 48
49 49
50 self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True) 50 self.cachedir = bb.data.getVar("CACHE", data, True)
51 self.clean = {} 51 self.clean = {}
52 self.checked = {} 52 self.checked = {}
53 self.depends_cache = {} 53 self.depends_cache = {}
@@ -61,30 +61,28 @@ class Cache:
61 return 61 return
62 62
63 self.has_cache = True 63 self.has_cache = True
64 self.cachefile = os.path.join(self.cachedir,"bb_cache.dat") 64 self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
65 65
66 bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir) 66 bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
67 try: 67 bb.utils.mkdirhier(self.cachedir)
68 os.stat( self.cachedir )
69 except OSError:
70 bb.mkdirhier( self.cachedir )
71 68
72 # If any of configuration.data's dependencies are newer than the 69 # If any of configuration.data's dependencies are newer than the
73 # cache there isn't even any point in loading it... 70 # cache there isn't even any point in loading it...
74 newest_mtime = 0 71 newest_mtime = 0
75 deps = bb.data.getVar("__depends", cooker.configuration.data, True) 72 deps = bb.data.getVar("__depends", data)
76 for f,old_mtime in deps: 73
77 if old_mtime > newest_mtime: 74 old_mtimes = [old_mtime for f, old_mtime in deps]
78 newest_mtime = old_mtime 75 old_mtimes.append(newest_mtime)
76 newest_mtime = max(old_mtimes)
79 77
80 if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime: 78 if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
81 try: 79 try:
82 p = pickle.Unpickler(file(self.cachefile, "rb")) 80 p = pickle.Unpickler(file(self.cachefile, "rb"))
83 self.depends_cache, version_data = p.load() 81 self.depends_cache, version_data = p.load()
84 if version_data['CACHE_VER'] != __cache_version__: 82 if version_data['CACHE_VER'] != __cache_version__:
85 raise ValueError, 'Cache Version Mismatch' 83 raise ValueError('Cache Version Mismatch')
86 if version_data['BITBAKE_VER'] != bb.__version__: 84 if version_data['BITBAKE_VER'] != bb.__version__:
87 raise ValueError, 'Bitbake Version Mismatch' 85 raise ValueError('Bitbake Version Mismatch')
88 except EOFError: 86 except EOFError:
89 bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...") 87 bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
90 self.depends_cache = {} 88 self.depends_cache = {}
@@ -92,27 +90,23 @@ class Cache:
92 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...") 90 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
93 self.depends_cache = {} 91 self.depends_cache = {}
94 else: 92 else:
95 try: 93 if os.path.isfile(self.cachefile):
96 os.stat( self.cachefile )
97 bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...") 94 bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
98 except OSError:
99 pass
100 95
101 def getVar(self, var, fn, exp = 0): 96 def getVar(self, var, fn, exp = 0):
102 """ 97 """
103 Gets the value of a variable 98 Gets the value of a variable
104 (similar to getVar in the data class) 99 (similar to getVar in the data class)
105 100
106 There are two scenarios: 101 There are two scenarios:
107 1. We have cached data - serve from depends_cache[fn] 102 1. We have cached data - serve from depends_cache[fn]
108 2. We're learning what data to cache - serve from data 103 2. We're learning what data to cache - serve from data
109 backend but add a copy of the data to the cache. 104 backend but add a copy of the data to the cache.
110 """ 105 """
111 if fn in self.clean: 106 if fn in self.clean:
112 return self.depends_cache[fn][var] 107 return self.depends_cache[fn][var]
113 108
114 if not fn in self.depends_cache: 109 self.depends_cache.setdefault(fn, {})
115 self.depends_cache[fn] = {}
116 110
117 if fn != self.data_fn: 111 if fn != self.data_fn:
118 # We're trying to access data in the cache which doesn't exist 112 # We're trying to access data in the cache which doesn't exist
@@ -134,14 +128,14 @@ class Cache:
134 self.data = data 128 self.data = data
135 129
136 # Make sure __depends makes the depends_cache 130 # Make sure __depends makes the depends_cache
137 # If we're a virtual class we need to make sure all our depends are appended 131 # If we're a virtual class we need to make sure all our depends are appended
138 # to the depends of fn. 132 # to the depends of fn.
139 depends = self.getVar("__depends", virtualfn, True) or [] 133 depends = self.getVar("__depends", virtualfn) or set()
134 self.depends_cache.setdefault(fn, {})
140 if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]: 135 if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
141 self.depends_cache[fn]["__depends"] = depends 136 self.depends_cache[fn]["__depends"] = depends
142 for dep in depends: 137 else:
143 if dep not in self.depends_cache[fn]["__depends"]: 138 self.depends_cache[fn]["__depends"].update(depends)
144 self.depends_cache[fn]["__depends"].append(dep)
145 139
146 # Make sure the variants always make it into the cache too 140 # Make sure the variants always make it into the cache too
147 self.getVar('__VARIANTS', virtualfn, True) 141 self.getVar('__VARIANTS', virtualfn, True)
@@ -217,7 +211,7 @@ class Cache:
217 for data in bb_data: 211 for data in bb_data:
218 virtualfn = self.realfn2virtual(fn, data) 212 virtualfn = self.realfn2virtual(fn, data)
219 self.setData(virtualfn, fn, bb_data[data]) 213 self.setData(virtualfn, fn, bb_data[data])
220 if self.getVar("__SKIPPED", virtualfn, True): 214 if self.getVar("__SKIPPED", virtualfn):
221 skipped += 1 215 skipped += 1
222 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn) 216 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
223 else: 217 else:
@@ -258,11 +252,11 @@ class Cache:
258 self.remove(fn) 252 self.remove(fn)
259 return False 253 return False
260 254
261 mtime = bb.parse.cached_mtime_noerror(fn) 255 mtime = bb.parse.cached_mtime_noerror(fn)
262 256
263 # Check file still exists 257 # Check file still exists
264 if mtime == 0: 258 if mtime == 0:
265 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn) 259 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
266 self.remove(fn) 260 self.remove(fn)
267 return False 261 return False
268 262
@@ -275,7 +269,7 @@ class Cache:
275 # Check dependencies are still valid 269 # Check dependencies are still valid
276 depends = self.getVar("__depends", fn, True) 270 depends = self.getVar("__depends", fn, True)
277 if depends: 271 if depends:
278 for f,old_mtime in depends: 272 for f, old_mtime in depends:
279 fmtime = bb.parse.cached_mtime_noerror(f) 273 fmtime = bb.parse.cached_mtime_noerror(f)
280 # Check if file still exists 274 # Check if file still exists
281 if old_mtime != 0 and fmtime == 0: 275 if old_mtime != 0 and fmtime == 0:
@@ -345,14 +339,14 @@ class Cache:
345 339
346 def handle_data(self, file_name, cacheData): 340 def handle_data(self, file_name, cacheData):
347 """ 341 """
348 Save data we need into the cache 342 Save data we need into the cache
349 """ 343 """
350 344
351 pn = self.getVar('PN', file_name, True) 345 pn = self.getVar('PN', file_name, True)
352 pe = self.getVar('PE', file_name, True) or "0" 346 pe = self.getVar('PE', file_name, True) or "0"
353 pv = self.getVar('PV', file_name, True) 347 pv = self.getVar('PV', file_name, True)
354 if 'SRCREVINACTION' in pv: 348 if 'SRCREVINACTION' in pv:
355 bb.note("Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name)) 349 bb.msg.note(1, bb.msg.domain.Cache, "Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
356 pr = self.getVar('PR', file_name, True) 350 pr = self.getVar('PR', file_name, True)
357 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0") 351 dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
358 depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "") 352 depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
@@ -360,7 +354,7 @@ class Cache:
360 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split() 354 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
361 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split() 355 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
362 356
363 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True) 357 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name)
364 358
365 # build PackageName to FileName lookup table 359 # build PackageName to FileName lookup table
366 if pn not in cacheData.pkg_pn: 360 if pn not in cacheData.pkg_pn:
@@ -371,7 +365,7 @@ class Cache:
371 365
372 # build FileName to PackageName lookup table 366 # build FileName to PackageName lookup table
373 cacheData.pkg_fn[file_name] = pn 367 cacheData.pkg_fn[file_name] = pn
374 cacheData.pkg_pepvpr[file_name] = (pe,pv,pr) 368 cacheData.pkg_pepvpr[file_name] = (pe, pv, pr)
375 cacheData.pkg_dp[file_name] = dp 369 cacheData.pkg_dp[file_name] = dp
376 370
377 provides = [pn] 371 provides = [pn]
@@ -400,13 +394,13 @@ class Cache:
400 if not dep in cacheData.all_depends: 394 if not dep in cacheData.all_depends:
401 cacheData.all_depends.append(dep) 395 cacheData.all_depends.append(dep)
402 396
403 # Build reverse hash for PACKAGES, so runtime dependencies 397 # Build reverse hash for PACKAGES, so runtime dependencies
404 # can be be resolved (RDEPENDS, RRECOMMENDS etc.) 398 # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
405 for package in packages: 399 for package in packages:
406 if not package in cacheData.packages: 400 if not package in cacheData.packages:
407 cacheData.packages[package] = [] 401 cacheData.packages[package] = []
408 cacheData.packages[package].append(file_name) 402 cacheData.packages[package].append(file_name)
409 rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split() 403 rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
410 404
411 for package in packages_dynamic: 405 for package in packages_dynamic:
412 if not package in cacheData.packages_dynamic: 406 if not package in cacheData.packages_dynamic:
@@ -445,38 +439,45 @@ class Cache:
445 self.getVar('__BB_DONT_CACHE', file_name, True) 439 self.getVar('__BB_DONT_CACHE', file_name, True)
446 self.getVar('__VARIANTS', file_name, True) 440 self.getVar('__VARIANTS', file_name, True)
447 441
448 def load_bbfile( self, bbfile , config): 442 def load_bbfile( self, bbfile, config):
449 """ 443 """
450 Load and parse one .bb build file 444 Load and parse one .bb build file
451 Return the data and whether parsing resulted in the file being skipped 445 Return the data and whether parsing resulted in the file being skipped
452 """ 446 """
447 chdir_back = False
453 448
454 import bb 449 from bb import data, parse
455 from bb import utils, data, parse, debug, event, fatal
456 450
457 # expand tmpdir to include this topdir 451 # expand tmpdir to include this topdir
458 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config) 452 data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
459 bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) 453 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
460 oldpath = os.path.abspath(os.getcwd()) 454 oldpath = os.path.abspath(os.getcwd())
461 if bb.parse.cached_mtime_noerror(bbfile_loc): 455 parse.cached_mtime_noerror(bbfile_loc)
462 os.chdir(bbfile_loc)
463 bb_data = data.init_db(config) 456 bb_data = data.init_db(config)
457 # The ConfHandler first looks if there is a TOPDIR and if not
458 # then it would call getcwd().
459 # Previously, we chdir()ed to bbfile_loc, called the handler
460 # and finally chdir()ed back, a couple of thousand times. We now
461 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
462 if not data.getVar('TOPDIR', bb_data):
463 chdir_back = True
464 data.setVar('TOPDIR', bbfile_loc, bb_data)
464 try: 465 try:
465 bb_data = parse.handle(bbfile, bb_data) # read .bb data 466 bb_data = parse.handle(bbfile, bb_data) # read .bb data
466 os.chdir(oldpath) 467 if chdir_back: os.chdir(oldpath)
467 return bb_data 468 return bb_data
468 except: 469 except:
469 os.chdir(oldpath) 470 if chdir_back: os.chdir(oldpath)
470 raise 471 raise
471 472
472def init(cooker): 473def init(cooker):
473 """ 474 """
474 The Objective: Cache the minimum amount of data possible yet get to the 475 The Objective: Cache the minimum amount of data possible yet get to the
475 stage of building packages (i.e. tryBuild) without reparsing any .bb files. 476 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
476 477
477 To do this, we intercept getVar calls and only cache the variables we see 478 To do this, we intercept getVar calls and only cache the variables we see
478 being accessed. We rely on the cache getVar calls being made for all 479 being accessed. We rely on the cache getVar calls being made for all
479 variables bitbake might need to use to reach this stage. For each cached 480 variables bitbake might need to use to reach this stage. For each cached
480 file we need to track: 481 file we need to track:
481 482
482 * Its mtime 483 * Its mtime
@@ -486,7 +487,7 @@ def init(cooker):
486 Files causing parsing errors are evicted from the cache. 487 Files causing parsing errors are evicted from the cache.
487 488
488 """ 489 """
489 return Cache(cooker) 490 return Cache(cooker.configuration.data)
490 491
491 492
492 493
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 06bd203c90..9a8d689e2a 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -20,7 +20,7 @@ Provide an interface to interact with the bitbake server through 'commands'
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 21
22""" 22"""
23The bitbake server takes 'commands' from its UI/commandline. 23The bitbake server takes 'commands' from its UI/commandline.
24Commands are either synchronous or asynchronous. 24Commands are either synchronous or asynchronous.
25Async commands return data to the client in the form of events. 25Async commands return data to the client in the form of events.
26Sync commands must only return data through the function return value 26Sync commands must only return data through the function return value
@@ -62,7 +62,7 @@ class Command:
62 try: 62 try:
63 command = commandline.pop(0) 63 command = commandline.pop(0)
64 if command in CommandsSync.__dict__: 64 if command in CommandsSync.__dict__:
65 # Can run synchronous commands straight away 65 # Can run synchronous commands straight away
66 return getattr(CommandsSync, command)(self.cmds_sync, self, commandline) 66 return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
67 if self.currentAsyncCommand is not None: 67 if self.currentAsyncCommand is not None:
68 return "Busy (%s in progress)" % self.currentAsyncCommand[0] 68 return "Busy (%s in progress)" % self.currentAsyncCommand[0]
@@ -89,7 +89,17 @@ class Command:
89 return False 89 return False
90 else: 90 else:
91 return False 91 return False
92 except: 92 except KeyboardInterrupt as exc:
93 self.finishAsyncCommand("Interrupted")
94 return False
95 except SystemExit as exc:
96 arg = exc.args[0]
97 if isinstance(arg, basestring):
98 self.finishAsyncCommand(arg)
99 else:
100 self.finishAsyncCommand("Exited with %s" % arg)
101 return False
102 except Exception:
93 import traceback 103 import traceback
94 self.finishAsyncCommand(traceback.format_exc()) 104 self.finishAsyncCommand(traceback.format_exc())
95 return False 105 return False
@@ -268,6 +278,3 @@ class CookerCommandSetExitCode(bb.event.Event):
268 def __init__(self, exitcode): 278 def __init__(self, exitcode):
269 bb.event.Event.__init__(self) 279 bb.event.Event.__init__(self)
270 self.exitcode = int(exitcode) 280 self.exitcode = int(exitcode)
271
272
273
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 2406dfe95b..488bc610d2 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -22,11 +22,13 @@
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import sys, os, getopt, glob, copy, os.path, re, time 25from __future__ import print_function
26import sys, os, glob, os.path, re, time
27import sre_constants
28from cStringIO import StringIO
29from contextlib import closing
26import bb 30import bb
27from bb import utils, data, parse, event, cache, providers, taskdata, runqueue 31from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
28from bb import command
29import itertools, sre_constants
30 32
31class MultipleMatches(Exception): 33class MultipleMatches(Exception):
32 """ 34 """
@@ -121,11 +123,11 @@ class BBCooker:
121 self.commandlineAction = None 123 self.commandlineAction = None
122 124
123 if 'world' in self.configuration.pkgs_to_build: 125 if 'world' in self.configuration.pkgs_to_build:
124 bb.error("'world' is not a valid target for --environment.") 126 bb.msg.error(bb.msg.domain.Build, "'world' is not a valid target for --environment.")
125 elif len(self.configuration.pkgs_to_build) > 1: 127 elif len(self.configuration.pkgs_to_build) > 1:
126 bb.error("Only one target can be used with the --environment option.") 128 bb.msg.error(bb.msg.domain.Build, "Only one target can be used with the --environment option.")
127 elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0: 129 elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
128 bb.error("No target should be used with the --environment and --buildfile options.") 130 bb.msg.error(bb.msg.domain.Build, "No target should be used with the --environment and --buildfile options.")
129 elif len(self.configuration.pkgs_to_build) > 0: 131 elif len(self.configuration.pkgs_to_build) > 0:
130 self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build] 132 self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
131 else: 133 else:
@@ -138,21 +140,18 @@ class BBCooker:
138 self.commandlineAction = ["showVersions"] 140 self.commandlineAction = ["showVersions"]
139 elif self.configuration.parse_only: 141 elif self.configuration.parse_only:
140 self.commandlineAction = ["parseFiles"] 142 self.commandlineAction = ["parseFiles"]
141 # FIXME - implement
142 #elif self.configuration.interactive:
143 # self.interactiveMode()
144 elif self.configuration.dot_graph: 143 elif self.configuration.dot_graph:
145 if self.configuration.pkgs_to_build: 144 if self.configuration.pkgs_to_build:
146 self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd] 145 self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
147 else: 146 else:
148 self.commandlineAction = None 147 self.commandlineAction = None
149 bb.error("Please specify a package name for dependency graph generation.") 148 bb.msg.error(bb.msg.domain.Build, "Please specify a package name for dependency graph generation.")
150 else: 149 else:
151 if self.configuration.pkgs_to_build: 150 if self.configuration.pkgs_to_build:
152 self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd] 151 self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
153 else: 152 else:
154 self.commandlineAction = None 153 self.commandlineAction = None
155 bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") 154 bb.msg.error(bb.msg.domain.Build, "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
156 155
157 def runCommands(self, server, data, abort): 156 def runCommands(self, server, data, abort):
158 """ 157 """
@@ -174,14 +173,14 @@ class BBCooker:
174 except bb.build.FuncFailed: 173 except bb.build.FuncFailed:
175 bb.msg.error(bb.msg.domain.Build, "task stack execution failed") 174 bb.msg.error(bb.msg.domain.Build, "task stack execution failed")
176 raise 175 raise
177 except bb.build.EventException, e: 176 except bb.build.EventException as e:
178 event = e.args[1] 177 event = e.args[1]
179 bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event)) 178 bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event))
180 raise 179 raise
181 180
182 def tryBuild(self, fn, task): 181 def tryBuild(self, fn, task):
183 """ 182 """
184 Build a provider and its dependencies. 183 Build a provider and its dependencies.
185 build_depends is a list of previous build dependencies (not runtime) 184 build_depends is a list of previous build dependencies (not runtime)
186 If build_depends is empty, we're dealing with a runtime depends 185 If build_depends is empty, we're dealing with a runtime depends
187 """ 186 """
@@ -206,7 +205,7 @@ class BBCooker:
206 205
207 # Sort by priority 206 # Sort by priority
208 for pn in pkg_pn: 207 for pn in pkg_pn:
209 (last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status) 208 (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
210 preferred_versions[pn] = (pref_ver, pref_file) 209 preferred_versions[pn] = (pref_ver, pref_file)
211 latest_versions[pn] = (last_ver, last_file) 210 latest_versions[pn] = (last_ver, last_file)
212 211
@@ -260,27 +259,22 @@ class BBCooker:
260 if fn: 259 if fn:
261 try: 260 try:
262 envdata = self.bb_cache.loadDataFull(fn, self.configuration.data) 261 envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
263 except IOError, e: 262 except IOError as e:
264 bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e)) 263 bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
265 raise 264 raise
266 except Exception, e: 265 except Exception as e:
267 bb.msg.error(bb.msg.domain.Parsing, "%s" % e) 266 bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
268 raise 267 raise
269 268
270 class dummywrite:
271 def __init__(self):
272 self.writebuf = ""
273 def write(self, output):
274 self.writebuf = self.writebuf + output
275
276 # emit variables and shell functions 269 # emit variables and shell functions
277 try: 270 try:
278 data.update_data(envdata) 271 data.update_data(envdata)
279 wb = dummywrite() 272 with closing(StringIO()) as env:
280 data.emit_env(wb, envdata, True) 273 data.emit_env(env, envdata, True)
281 bb.msg.plain(wb.writebuf) 274 bb.msg.plain(env.getvalue())
282 except Exception, e: 275 except Exception as e:
283 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) 276 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
277
284 # emit the metadata which isnt valid shell 278 # emit the metadata which isnt valid shell
285 data.expandKeys(envdata) 279 data.expandKeys(envdata)
286 for e in envdata.keys(): 280 for e in envdata.keys():
@@ -315,7 +309,7 @@ class BBCooker:
315 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) 309 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
316 rq.prepare_runqueue() 310 rq.prepare_runqueue()
317 311
318 seen_fnids = [] 312 seen_fnids = []
319 depend_tree = {} 313 depend_tree = {}
320 depend_tree["depends"] = {} 314 depend_tree["depends"] = {}
321 depend_tree["tdepends"] = {} 315 depend_tree["tdepends"] = {}
@@ -352,7 +346,7 @@ class BBCooker:
352 346
353 depend_tree["rdepends-pn"][pn] = [] 347 depend_tree["rdepends-pn"][pn] = []
354 for rdep in taskdata.rdepids[fnid]: 348 for rdep in taskdata.rdepids[fnid]:
355 depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep]) 349 depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
356 350
357 rdepends = self.status.rundeps[fn] 351 rdepends = self.status.rundeps[fn]
358 for package in rdepends: 352 for package in rdepends:
@@ -397,51 +391,51 @@ class BBCooker:
397 391
398 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn 392 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
399 depends_file = file('pn-depends.dot', 'w' ) 393 depends_file = file('pn-depends.dot', 'w' )
400 print >> depends_file, "digraph depends {" 394 print("digraph depends {", file=depends_file)
401 for pn in depgraph["pn"]: 395 for pn in depgraph["pn"]:
402 fn = depgraph["pn"][pn]["filename"] 396 fn = depgraph["pn"][pn]["filename"]
403 version = depgraph["pn"][pn]["version"] 397 version = depgraph["pn"][pn]["version"]
404 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) 398 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
405 for pn in depgraph["depends"]: 399 for pn in depgraph["depends"]:
406 for depend in depgraph["depends"][pn]: 400 for depend in depgraph["depends"][pn]:
407 print >> depends_file, '"%s" -> "%s"' % (pn, depend) 401 print('"%s" -> "%s"' % (pn, depend), file=depends_file)
408 for pn in depgraph["rdepends-pn"]: 402 for pn in depgraph["rdepends-pn"]:
409 for rdepend in depgraph["rdepends-pn"][pn]: 403 for rdepend in depgraph["rdepends-pn"][pn]:
410 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend) 404 print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
411 print >> depends_file, "}" 405 print("}", file=depends_file)
412 bb.msg.plain("PN dependencies saved to 'pn-depends.dot'") 406 bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
413 407
414 depends_file = file('package-depends.dot', 'w' ) 408 depends_file = file('package-depends.dot', 'w' )
415 print >> depends_file, "digraph depends {" 409 print("digraph depends {", file=depends_file)
416 for package in depgraph["packages"]: 410 for package in depgraph["packages"]:
417 pn = depgraph["packages"][package]["pn"] 411 pn = depgraph["packages"][package]["pn"]
418 fn = depgraph["packages"][package]["filename"] 412 fn = depgraph["packages"][package]["filename"]
419 version = depgraph["packages"][package]["version"] 413 version = depgraph["packages"][package]["version"]
420 if package == pn: 414 if package == pn:
421 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) 415 print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
422 else: 416 else:
423 print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn) 417 print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
424 for depend in depgraph["depends"][pn]: 418 for depend in depgraph["depends"][pn]:
425 print >> depends_file, '"%s" -> "%s"' % (package, depend) 419 print('"%s" -> "%s"' % (package, depend), file=depends_file)
426 for package in depgraph["rdepends-pkg"]: 420 for package in depgraph["rdepends-pkg"]:
427 for rdepend in depgraph["rdepends-pkg"][package]: 421 for rdepend in depgraph["rdepends-pkg"][package]:
428 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) 422 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
429 for package in depgraph["rrecs-pkg"]: 423 for package in depgraph["rrecs-pkg"]:
430 for rdepend in depgraph["rrecs-pkg"][package]: 424 for rdepend in depgraph["rrecs-pkg"][package]:
431 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend) 425 print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
432 print >> depends_file, "}" 426 print("}", file=depends_file)
433 bb.msg.plain("Package dependencies saved to 'package-depends.dot'") 427 bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
434 428
435 tdepends_file = file('task-depends.dot', 'w' ) 429 tdepends_file = file('task-depends.dot', 'w' )
436 print >> tdepends_file, "digraph depends {" 430 print("digraph depends {", file=tdepends_file)
437 for task in depgraph["tdepends"]: 431 for task in depgraph["tdepends"]:
438 (pn, taskname) = task.rsplit(".", 1) 432 (pn, taskname) = task.rsplit(".", 1)
439 fn = depgraph["pn"][pn]["filename"] 433 fn = depgraph["pn"][pn]["filename"]
440 version = depgraph["pn"][pn]["version"] 434 version = depgraph["pn"][pn]["version"]
441 print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn) 435 print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
442 for dep in depgraph["tdepends"][task]: 436 for dep in depgraph["tdepends"][task]:
443 print >> tdepends_file, '"%s" -> "%s"' % (task, dep) 437 print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
444 print >> tdepends_file, "}" 438 print("}", file=tdepends_file)
445 bb.msg.plain("Task dependencies saved to 'task-depends.dot'") 439 bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
446 440
447 def buildDepgraph( self ): 441 def buildDepgraph( self ):
@@ -452,9 +446,12 @@ class BBCooker:
452 bb.data.update_data(localdata) 446 bb.data.update_data(localdata)
453 bb.data.expandKeys(localdata) 447 bb.data.expandKeys(localdata)
454 448
449 matched = set()
455 def calc_bbfile_priority(filename): 450 def calc_bbfile_priority(filename):
456 for (regex, pri) in self.status.bbfile_config_priorities: 451 for _, _, regex, pri in self.status.bbfile_config_priorities:
457 if regex.match(filename): 452 if regex.match(filename):
453 if not regex in matched:
454 matched.add(regex)
458 return pri 455 return pri
459 return 0 456 return 0
460 457
@@ -473,6 +470,11 @@ class BBCooker:
473 for p in self.status.pkg_fn: 470 for p in self.status.pkg_fn:
474 self.status.bbfile_priority[p] = calc_bbfile_priority(p) 471 self.status.bbfile_priority[p] = calc_bbfile_priority(p)
475 472
473 for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
474 if not regex in matched:
475 bb.msg.warn(bb.msg.domain.Provider, "No bb files matched BBFILE_PATTERN_%s '%s'" %
476 (collection, pattern))
477
476 def buildWorldTargetList(self): 478 def buildWorldTargetList(self):
477 """ 479 """
478 Build package list for "bitbake world" 480 Build package list for "bitbake world"
@@ -505,31 +507,57 @@ class BBCooker:
505 """Drop off into a shell""" 507 """Drop off into a shell"""
506 try: 508 try:
507 from bb import shell 509 from bb import shell
508 except ImportError, details: 510 except ImportError as details:
509 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details ) 511 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
510 else: 512 else:
511 shell.start( self ) 513 shell.start( self )
512 514
515 def _findLayerConf(self):
516 path = os.getcwd()
517 while path != "/":
518 bblayers = os.path.join(path, "conf", "bblayers.conf")
519 if os.path.exists(bblayers):
520 return bblayers
521
522 path, _ = os.path.split(path)
523
513 def parseConfigurationFiles(self, files): 524 def parseConfigurationFiles(self, files):
514 try: 525 try:
515 data = self.configuration.data 526 data = self.configuration.data
516 for f in files: 527 for f in files:
517 data = bb.parse.handle(f, data) 528 data = bb.parse.handle(f, data)
518 529
519 layerconf = os.path.join(os.getcwd(), "conf", "bblayers.conf") 530 layerconf = self._findLayerConf()
520 if os.path.exists(layerconf): 531 if layerconf:
521 bb.msg.debug(2, bb.msg.domain.Parsing, "Found bblayers.conf (%s)" % layerconf) 532 bb.msg.debug(2, bb.msg.domain.Parsing, "Found bblayers.conf (%s)" % layerconf)
522 data = bb.parse.handle(layerconf, data) 533 data = bb.parse.handle(layerconf, data)
523 534
524 layers = (bb.data.getVar('BBLAYERS', data, True) or "").split() 535 layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
525 536
537 data = bb.data.createCopy(data)
526 for layer in layers: 538 for layer in layers:
527 bb.msg.debug(2, bb.msg.domain.Parsing, "Adding layer %s" % layer) 539 bb.msg.debug(2, bb.msg.domain.Parsing, "Adding layer %s" % layer)
528 bb.data.setVar('LAYERDIR', layer, data) 540 bb.data.setVar('LAYERDIR', layer, data)
529 data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data) 541 data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data)
530 542
543 # XXX: Hack, relies on the local keys of the datasmart
544 # instance being stored in the 'dict' attribute and makes
545 # assumptions about how variable expansion works, but
546 # there's no better way to force an expansion of a single
547 # variable across the datastore today, and this at least
548 # lets us reference LAYERDIR without having to immediately
549 # eval all our variables that use it.
550 for key in data.dict:
551 if key != "_data":
552 value = data.getVar(key, False)
553 if value and "${LAYERDIR}" in value:
554 data.setVar(key, value.replace("${LAYERDIR}", layer))
555
531 bb.data.delVar('LAYERDIR', data) 556 bb.data.delVar('LAYERDIR', data)
532 557
558 if not data.getVar("BBPATH", True):
559 bb.fatal("The BBPATH variable is not set")
560
533 data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data) 561 data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data)
534 562
535 self.configuration.data = data 563 self.configuration.data = data
@@ -541,16 +569,17 @@ class BBCooker:
541 569
542 # Nomally we only register event handlers at the end of parsing .bb files 570 # Nomally we only register event handlers at the end of parsing .bb files
543 # We register any handlers we've found so far here... 571 # We register any handlers we've found so far here...
544 for var in data.getVar('__BBHANDLERS', self.configuration.data) or []: 572 for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
545 bb.event.register(var,bb.data.getVar(var, self.configuration.data)) 573 bb.event.register(var, bb.data.getVar(var, self.configuration.data))
546 574
547 bb.fetch.fetcher_init(self.configuration.data) 575 bb.fetch.fetcher_init(self.configuration.data)
548 576
549 bb.event.fire(bb.event.ConfigParsed(), self.configuration.data) 577 bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
550 578
551 except IOError, e: 579
580 except IOError as e:
552 bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (files, str(e))) 581 bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (files, str(e)))
553 except bb.parse.ParseError, details: 582 except bb.parse.ParseError as details:
554 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (files, details) ) 583 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (files, details) )
555 584
556 def handleCollections( self, collections ): 585 def handleCollections( self, collections ):
@@ -573,7 +602,7 @@ class BBCooker:
573 continue 602 continue
574 try: 603 try:
575 pri = int(priority) 604 pri = int(priority)
576 self.status.bbfile_config_priorities.append((cre, pri)) 605 self.status.bbfile_config_priorities.append((c, regex, cre, pri))
577 except ValueError: 606 except ValueError:
578 bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority)) 607 bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
579 608
@@ -582,8 +611,8 @@ class BBCooker:
582 Setup any variables needed before starting a build 611 Setup any variables needed before starting a build
583 """ 612 """
584 if not bb.data.getVar("BUILDNAME", self.configuration.data): 613 if not bb.data.getVar("BUILDNAME", self.configuration.data):
585 bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data) 614 bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
586 bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data) 615 bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)
587 616
588 def matchFiles(self, buildfile): 617 def matchFiles(self, buildfile):
589 """ 618 """
@@ -630,13 +659,19 @@ class BBCooker:
630 if (task == None): 659 if (task == None):
631 task = self.configuration.cmd 660 task = self.configuration.cmd
632 661
633 fn = self.matchFile(buildfile) 662 self.bb_cache = bb.cache.init(self)
663 self.status = bb.cache.CacheData()
664
665 (fn, cls) = self.bb_cache.virtualfn2realfn(buildfile)
666 buildfile = self.matchFile(fn)
667 fn = self.bb_cache.realfn2virtual(buildfile, cls)
668
634 self.buildSetVars() 669 self.buildSetVars()
635 670
636 # Load data into the cache for fn and parse the loaded cache data 671 # Load data into the cache for fn and parse the loaded cache data
637 self.bb_cache = bb.cache.init(self) 672 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
638 self.status = bb.cache.CacheData() 673 self.bb_cache.setData(fn, buildfile, the_data)
639 self.bb_cache.loadData(fn, self.configuration.data, self.status) 674 self.bb_cache.handle_data(fn, self.status)
640 675
641 # Tweak some variables 676 # Tweak some variables
642 item = self.bb_cache.getVar('PN', fn, True) 677 item = self.bb_cache.getVar('PN', fn, True)
@@ -675,8 +710,8 @@ class BBCooker:
675 failures = 0 710 failures = 0
676 try: 711 try:
677 retval = rq.execute_runqueue() 712 retval = rq.execute_runqueue()
678 except runqueue.TaskFailure, fnids: 713 except runqueue.TaskFailure as exc:
679 for fnid in fnids: 714 for fnid in exc.args:
680 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 715 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
681 failures = failures + 1 716 failures = failures + 1
682 retval = False 717 retval = False
@@ -711,8 +746,8 @@ class BBCooker:
711 failures = 0 746 failures = 0
712 try: 747 try:
713 retval = rq.execute_runqueue() 748 retval = rq.execute_runqueue()
714 except runqueue.TaskFailure, fnids: 749 except runqueue.TaskFailure as exc:
715 for fnid in fnids: 750 for fnid in exc.args:
716 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 751 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
717 failures = failures + 1 752 failures = failures + 1
718 retval = False 753 retval = False
@@ -769,10 +804,10 @@ class BBCooker:
769 804
770 ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" 805 ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
771 self.status.ignored_dependencies = set(ignore.split()) 806 self.status.ignored_dependencies = set(ignore.split())
772 807
773 for dep in self.configuration.extra_assume_provided: 808 for dep in self.configuration.extra_assume_provided:
774 self.status.ignored_dependencies.add(dep) 809 self.status.ignored_dependencies.add(dep)
775 810
776 self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) ) 811 self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
777 812
778 bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files") 813 bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
@@ -810,7 +845,7 @@ class BBCooker:
810 for f in contents: 845 for f in contents:
811 (root, ext) = os.path.splitext(f) 846 (root, ext) = os.path.splitext(f)
812 if ext == ".bb": 847 if ext == ".bb":
813 bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f))) 848 bbfiles.append(os.path.abspath(os.path.join(os.getcwd(), f)))
814 return bbfiles 849 return bbfiles
815 850
816 def find_bbfiles( self, path ): 851 def find_bbfiles( self, path ):
@@ -822,7 +857,7 @@ class BBCooker:
822 for ignored in ('SCCS', 'CVS', '.svn'): 857 for ignored in ('SCCS', 'CVS', '.svn'):
823 if ignored in dirs: 858 if ignored in dirs:
824 dirs.remove(ignored) 859 dirs.remove(ignored)
825 found += [join(dir,f) for f in files if f.endswith('.bb')] 860 found += [join(dir, f) for f in files if f.endswith('.bb')]
826 861
827 return found 862 return found
828 863
@@ -906,9 +941,9 @@ class BBCooker:
906 pout.close() 941 pout.close()
907 else: 942 else:
908 self.server.serve_forever() 943 self.server.serve_forever()
909 944
910 bb.event.fire(CookerExit(), self.configuration.event_data) 945 bb.event.fire(CookerExit(), self.configuration.event_data)
911 946
912class CookerExit(bb.event.Event): 947class CookerExit(bb.event.Event):
913 """ 948 """
914 Notify clients of the Cooker shutdown 949 Notify clients of the Cooker shutdown
@@ -937,9 +972,9 @@ class CookerParser:
937 self.pointer = 0 972 self.pointer = 0
938 973
939 def parse_next(self): 974 def parse_next(self):
975 cooker = self.cooker
940 if self.pointer < len(self.filelist): 976 if self.pointer < len(self.filelist):
941 f = self.filelist[self.pointer] 977 f = self.filelist[self.pointer]
942 cooker = self.cooker
943 978
944 try: 979 try:
945 fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status) 980 fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status)
@@ -951,7 +986,7 @@ class CookerParser:
951 self.skipped += skipped 986 self.skipped += skipped
952 self.virtuals += virtuals 987 self.virtuals += virtuals
953 988
954 except IOError, e: 989 except IOError as e:
955 self.error += 1 990 self.error += 1
956 cooker.bb_cache.remove(f) 991 cooker.bb_cache.remove(f)
957 bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e)) 992 bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
@@ -960,7 +995,7 @@ class CookerParser:
960 cooker.bb_cache.remove(f) 995 cooker.bb_cache.remove(f)
961 cooker.bb_cache.sync() 996 cooker.bb_cache.sync()
962 raise 997 raise
963 except Exception, e: 998 except Exception as e:
964 self.error += 1 999 self.error += 1
965 cooker.bb_cache.remove(f) 1000 cooker.bb_cache.remove(f)
966 bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f)) 1001 bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
@@ -978,4 +1013,3 @@ class CookerParser:
978 raise ParsingErrorsFound 1013 raise ParsingErrorsFound
979 return False 1014 return False
980 return True 1015 return True
981
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index 1a8bb379f4..f0714b3af6 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -1,191 +1,190 @@
1""" 1"""
2Python Deamonizing helper 2Python Deamonizing helper
3 3
4Configurable daemon behaviors: 4Configurable daemon behaviors:
5 5
6 1.) The current working directory set to the "/" directory. 6 1.) The current working directory set to the "/" directory.
7 2.) The current file creation mode mask set to 0. 7 2.) The current file creation mode mask set to 0.
8 3.) Close all open files (1024). 8 3.) Close all open files (1024).
9 4.) Redirect standard I/O streams to "/dev/null". 9 4.) Redirect standard I/O streams to "/dev/null".
10 10
11A failed call to fork() now raises an exception. 11A failed call to fork() now raises an exception.
12 12
13References: 13References:
14 1) Advanced Programming in the Unix Environment: W. Richard Stevens 14 1) Advanced Programming in the Unix Environment: W. Richard Stevens
15 2) Unix Programming Frequently Asked Questions: 15 2) Unix Programming Frequently Asked Questions:
16 http://www.erlenstar.demon.co.uk/unix/faq_toc.html 16 http://www.erlenstar.demon.co.uk/unix/faq_toc.html
17 17
18Modified to allow a function to be daemonized and return for 18Modified to allow a function to be daemonized and return for
19bitbake use by Richard Purdie 19bitbake use by Richard Purdie
20""" 20"""
21 21
22__author__ = "Chad J. Schroeder" 22__author__ = "Chad J. Schroeder"
23__copyright__ = "Copyright (C) 2005 Chad J. Schroeder" 23__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
24__version__ = "0.2" 24__version__ = "0.2"
25 25
26# Standard Python modules. 26# Standard Python modules.
27import os # Miscellaneous OS interfaces. 27import os # Miscellaneous OS interfaces.
28import sys # System-specific parameters and functions. 28import sys # System-specific parameters and functions.
29 29
30# Default daemon parameters. 30# Default daemon parameters.
31# File mode creation mask of the daemon. 31# File mode creation mask of the daemon.
32# For BitBake's children, we do want to inherit the parent umask. 32# For BitBake's children, we do want to inherit the parent umask.
33UMASK = None 33UMASK = None
34 34
35# Default maximum for the number of available file descriptors. 35# Default maximum for the number of available file descriptors.
36MAXFD = 1024 36MAXFD = 1024
37 37
38# The standard I/O file descriptors are redirected to /dev/null by default. 38# The standard I/O file descriptors are redirected to /dev/null by default.
39if (hasattr(os, "devnull")): 39if (hasattr(os, "devnull")):
40 REDIRECT_TO = os.devnull 40 REDIRECT_TO = os.devnull
41else: 41else:
42 REDIRECT_TO = "/dev/null" 42 REDIRECT_TO = "/dev/null"
43 43
44def createDaemon(function, logfile): 44def createDaemon(function, logfile):
45 """ 45 """
46 Detach a process from the controlling terminal and run it in the 46 Detach a process from the controlling terminal and run it in the
47 background as a daemon, returning control to the caller. 47 background as a daemon, returning control to the caller.
48 """ 48 """
49 49
50 try: 50 try:
51 # Fork a child process so the parent can exit. This returns control to 51 # Fork a child process so the parent can exit. This returns control to
52 # the command-line or shell. It also guarantees that the child will not 52 # the command-line or shell. It also guarantees that the child will not
53 # be a process group leader, since the child receives a new process ID 53 # be a process group leader, since the child receives a new process ID
54 # and inherits the parent's process group ID. This step is required 54 # and inherits the parent's process group ID. This step is required
55 # to insure that the next call to os.setsid is successful. 55 # to insure that the next call to os.setsid is successful.
56 pid = os.fork() 56 pid = os.fork()
57 except OSError, e: 57 except OSError as e:
58 raise Exception, "%s [%d]" % (e.strerror, e.errno) 58 raise Exception("%s [%d]" % (e.strerror, e.errno))
59 59
60 if (pid == 0): # The first child. 60 if (pid == 0): # The first child.
61 # To become the session leader of this new session and the process group 61 # To become the session leader of this new session and the process group
62 # leader of the new process group, we call os.setsid(). The process is 62 # leader of the new process group, we call os.setsid(). The process is
63 # also guaranteed not to have a controlling terminal. 63 # also guaranteed not to have a controlling terminal.
64 os.setsid() 64 os.setsid()
65 65
66 # Is ignoring SIGHUP necessary? 66 # Is ignoring SIGHUP necessary?
67 # 67 #
68 # It's often suggested that the SIGHUP signal should be ignored before 68 # It's often suggested that the SIGHUP signal should be ignored before
69 # the second fork to avoid premature termination of the process. The 69 # the second fork to avoid premature termination of the process. The
70 # reason is that when the first child terminates, all processes, e.g. 70 # reason is that when the first child terminates, all processes, e.g.
71 # the second child, in the orphaned group will be sent a SIGHUP. 71 # the second child, in the orphaned group will be sent a SIGHUP.
72 # 72 #
73 # "However, as part of the session management system, there are exactly 73 # "However, as part of the session management system, there are exactly
74 # two cases where SIGHUP is sent on the death of a process: 74 # two cases where SIGHUP is sent on the death of a process:
75 # 75 #
76 # 1) When the process that dies is the session leader of a session that 76 # 1) When the process that dies is the session leader of a session that
77 # is attached to a terminal device, SIGHUP is sent to all processes 77 # is attached to a terminal device, SIGHUP is sent to all processes
78 # in the foreground process group of that terminal device. 78 # in the foreground process group of that terminal device.
79 # 2) When the death of a process causes a process group to become 79 # 2) When the death of a process causes a process group to become
80 # orphaned, and one or more processes in the orphaned group are 80 # orphaned, and one or more processes in the orphaned group are
81 # stopped, then SIGHUP and SIGCONT are sent to all members of the 81 # stopped, then SIGHUP and SIGCONT are sent to all members of the
82 # orphaned group." [2] 82 # orphaned group." [2]
83 # 83 #
84 # The first case can be ignored since the child is guaranteed not to have 84 # The first case can be ignored since the child is guaranteed not to have
85 # a controlling terminal. The second case isn't so easy to dismiss. 85 # a controlling terminal. The second case isn't so easy to dismiss.
86 # The process group is orphaned when the first child terminates and 86 # The process group is orphaned when the first child terminates and
87 # POSIX.1 requires that every STOPPED process in an orphaned process 87 # POSIX.1 requires that every STOPPED process in an orphaned process
88 # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the 88 # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
89 # second child is not STOPPED though, we can safely forego ignoring the 89 # second child is not STOPPED though, we can safely forego ignoring the
90 # SIGHUP signal. In any case, there are no ill-effects if it is ignored. 90 # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
91 # 91 #
92 # import signal # Set handlers for asynchronous events. 92 # import signal # Set handlers for asynchronous events.
93 # signal.signal(signal.SIGHUP, signal.SIG_IGN) 93 # signal.signal(signal.SIGHUP, signal.SIG_IGN)
94 94
95 try: 95 try:
96 # Fork a second child and exit immediately to prevent zombies. This 96 # Fork a second child and exit immediately to prevent zombies. This
97 # causes the second child process to be orphaned, making the init 97 # causes the second child process to be orphaned, making the init
98 # process responsible for its cleanup. And, since the first child is 98 # process responsible for its cleanup. And, since the first child is
99 # a session leader without a controlling terminal, it's possible for 99 # a session leader without a controlling terminal, it's possible for
100 # it to acquire one by opening a terminal in the future (System V- 100 # it to acquire one by opening a terminal in the future (System V-
101 # based systems). This second fork guarantees that the child is no 101 # based systems). This second fork guarantees that the child is no
102 # longer a session leader, preventing the daemon from ever acquiring 102 # longer a session leader, preventing the daemon from ever acquiring
103 # a controlling terminal. 103 # a controlling terminal.
104 pid = os.fork() # Fork a second child. 104 pid = os.fork() # Fork a second child.
105 except OSError, e: 105 except OSError as e:
106 raise Exception, "%s [%d]" % (e.strerror, e.errno) 106 raise Exception("%s [%d]" % (e.strerror, e.errno))
107 107
108 if (pid == 0): # The second child. 108 if (pid == 0): # The second child.
109 # We probably don't want the file mode creation mask inherited from 109 # We probably don't want the file mode creation mask inherited from
110 # the parent, so we give the child complete control over permissions. 110 # the parent, so we give the child complete control over permissions.
111 if UMASK is not None: 111 if UMASK is not None:
112 os.umask(UMASK) 112 os.umask(UMASK)
113 else: 113 else:
114 # Parent (the first child) of the second child. 114 # Parent (the first child) of the second child.
115 os._exit(0) 115 os._exit(0)
116 else: 116 else:
117 # exit() or _exit()? 117 # exit() or _exit()?
118 # _exit is like exit(), but it doesn't call any functions registered 118 # _exit is like exit(), but it doesn't call any functions registered
119 # with atexit (and on_exit) or any registered signal handlers. It also 119 # with atexit (and on_exit) or any registered signal handlers. It also
120 # closes any open file descriptors. Using exit() may cause all stdio 120 # closes any open file descriptors. Using exit() may cause all stdio
121 # streams to be flushed twice and any temporary files may be unexpectedly 121 # streams to be flushed twice and any temporary files may be unexpectedly
122 # removed. It's therefore recommended that child branches of a fork() 122 # removed. It's therefore recommended that child branches of a fork()
123 # and the parent branch(es) of a daemon use _exit(). 123 # and the parent branch(es) of a daemon use _exit().
124 return 124 return
125 125
126 # Close all open file descriptors. This prevents the child from keeping 126 # Close all open file descriptors. This prevents the child from keeping
127 # open any file descriptors inherited from the parent. There is a variety 127 # open any file descriptors inherited from the parent. There is a variety
128 # of methods to accomplish this task. Three are listed below. 128 # of methods to accomplish this task. Three are listed below.
129 # 129 #
130 # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum 130 # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
131 # number of open file descriptors to close. If it doesn't exists, use 131 # number of open file descriptors to close. If it doesn't exists, use
132 # the default value (configurable). 132 # the default value (configurable).
133 # 133 #
134 # try: 134 # try:
135 # maxfd = os.sysconf("SC_OPEN_MAX") 135 # maxfd = os.sysconf("SC_OPEN_MAX")
136 # except (AttributeError, ValueError): 136 # except (AttributeError, ValueError):
137 # maxfd = MAXFD 137 # maxfd = MAXFD
138 # 138 #
139 # OR 139 # OR
140 # 140 #
141 # if (os.sysconf_names.has_key("SC_OPEN_MAX")): 141 # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
142 # maxfd = os.sysconf("SC_OPEN_MAX") 142 # maxfd = os.sysconf("SC_OPEN_MAX")
143 # else: 143 # else:
144 # maxfd = MAXFD 144 # maxfd = MAXFD
145 # 145 #
146 # OR 146 # OR
147 # 147 #
148 # Use the getrlimit method to retrieve the maximum file descriptor number 148 # Use the getrlimit method to retrieve the maximum file descriptor number
149 # that can be opened by this process. If there is not limit on the 149 # that can be opened by this process. If there is not limit on the
150 # resource, use the default value. 150 # resource, use the default value.
151 # 151 #
152 import resource # Resource usage information. 152 import resource # Resource usage information.
153 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1] 153 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
154 if (maxfd == resource.RLIM_INFINITY): 154 if (maxfd == resource.RLIM_INFINITY):
155 maxfd = MAXFD 155 maxfd = MAXFD
156 156
157 # Iterate through and close all file descriptors. 157 # Iterate through and close all file descriptors.
158# for fd in range(0, maxfd): 158# for fd in range(0, maxfd):
159# try: 159# try:
160# os.close(fd) 160# os.close(fd)
161# except OSError: # ERROR, fd wasn't open to begin with (ignored) 161# except OSError: # ERROR, fd wasn't open to begin with (ignored)
162# pass 162# pass
163 163
164 # Redirect the standard I/O file descriptors to the specified file. Since 164 # Redirect the standard I/O file descriptors to the specified file. Since
165 # the daemon has no controlling terminal, most daemons redirect stdin, 165 # the daemon has no controlling terminal, most daemons redirect stdin,
166 # stdout, and stderr to /dev/null. This is done to prevent side-effects 166 # stdout, and stderr to /dev/null. This is done to prevent side-effects
167 # from reads and writes to the standard I/O file descriptors. 167 # from reads and writes to the standard I/O file descriptors.
168 168
169 # This call to open is guaranteed to return the lowest file descriptor, 169 # This call to open is guaranteed to return the lowest file descriptor,
170 # which will be 0 (stdin), since it was closed above. 170 # which will be 0 (stdin), since it was closed above.
171# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0) 171# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
172 172
173 # Duplicate standard input to standard output and standard error. 173 # Duplicate standard input to standard output and standard error.
174# os.dup2(0, 1) # standard output (1) 174# os.dup2(0, 1) # standard output (1)
175# os.dup2(0, 2) # standard error (2) 175# os.dup2(0, 2) # standard error (2)
176 176
177 177
178 si = file('/dev/null', 'r') 178 si = file('/dev/null', 'r')
179 so = file(logfile, 'w') 179 so = file(logfile, 'w')
180 se = so 180 se = so
181 181
182 182
183 # Replace those fds with our own 183 # Replace those fds with our own
184 os.dup2(si.fileno(), sys.stdin.fileno()) 184 os.dup2(si.fileno(), sys.stdin.fileno())
185 os.dup2(so.fileno(), sys.stdout.fileno()) 185 os.dup2(so.fileno(), sys.stdout.fileno())
186 os.dup2(se.fileno(), sys.stderr.fileno()) 186 os.dup2(se.fileno(), sys.stderr.fileno())
187 187
188 function() 188 function()
189 189
190 os._exit(0) 190 os._exit(0)
191
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index c3bb1a1f43..636983edcc 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -11,7 +11,7 @@ operations. At night the cookie monster came by and
11suggested 'give me cookies on setting the variables and 11suggested 'give me cookies on setting the variables and
12things will work out'. Taking this suggestion into account 12things will work out'. Taking this suggestion into account
13applying the skills from the not yet passed 'Entwurf und 13applying the skills from the not yet passed 'Entwurf und
14Analyse von Algorithmen' lecture and the cookie 14Analyse von Algorithmen' lecture and the cookie
15monster seems to be right. We will track setVar more carefully 15monster seems to be right. We will track setVar more carefully
16to have faster update_data and expandKeys operations. 16to have faster update_data and expandKeys operations.
17 17
@@ -37,39 +37,40 @@ the speed is more critical here.
37# 37#
38#Based on functions from the base bb module, Copyright 2003 Holger Schurig 38#Based on functions from the base bb module, Copyright 2003 Holger Schurig
39 39
40import sys, os, re, types 40import sys, os, re
41if sys.argv[0][-5:] == "pydoc": 41if sys.argv[0][-5:] == "pydoc":
42 path = os.path.dirname(os.path.dirname(sys.argv[1])) 42 path = os.path.dirname(os.path.dirname(sys.argv[1]))
43else: 43else:
44 path = os.path.dirname(os.path.dirname(sys.argv[0])) 44 path = os.path.dirname(os.path.dirname(sys.argv[0]))
45sys.path.insert(0,path) 45sys.path.insert(0, path)
46from itertools import groupby
46 47
47from bb import data_smart 48from bb import data_smart
48import bb 49import bb
49 50
50class VarExpandError(Exception):
51 pass
52
53_dict_type = data_smart.DataSmart 51_dict_type = data_smart.DataSmart
54 52
55def init(): 53def init():
54 """Return a new object representing the Bitbake data"""
56 return _dict_type() 55 return _dict_type()
57 56
58def init_db(parent = None): 57def init_db(parent = None):
58 """Return a new object representing the Bitbake data,
59 optionally based on an existing object"""
59 if parent: 60 if parent:
60 return parent.createCopy() 61 return parent.createCopy()
61 else: 62 else:
62 return _dict_type() 63 return _dict_type()
63 64
64def createCopy(source): 65def createCopy(source):
65 """Link the source set to the destination 66 """Link the source set to the destination
66 If one does not find the value in the destination set, 67 If one does not find the value in the destination set,
67 search will go on to the source set to get the value. 68 search will go on to the source set to get the value.
68 Value from source are copy-on-write. i.e. any try to 69 Value from source are copy-on-write. i.e. any try to
69 modify one of them will end up putting the modified value 70 modify one of them will end up putting the modified value
70 in the destination set. 71 in the destination set.
71 """ 72 """
72 return source.createCopy() 73 return source.createCopy()
73 74
74def initVar(var, d): 75def initVar(var, d):
75 """Non-destructive var init for data structure""" 76 """Non-destructive var init for data structure"""
@@ -77,91 +78,34 @@ def initVar(var, d):
77 78
78 79
79def setVar(var, value, d): 80def setVar(var, value, d):
80 """Set a variable to a given value 81 """Set a variable to a given value"""
81 82 d.setVar(var, value)
82 Example:
83 >>> d = init()
84 >>> setVar('TEST', 'testcontents', d)
85 >>> print getVar('TEST', d)
86 testcontents
87 """
88 d.setVar(var,value)
89 83
90 84
91def getVar(var, d, exp = 0): 85def getVar(var, d, exp = 0):
92 """Gets the value of a variable 86 """Gets the value of a variable"""
93 87 return d.getVar(var, exp)
94 Example:
95 >>> d = init()
96 >>> setVar('TEST', 'testcontents', d)
97 >>> print getVar('TEST', d)
98 testcontents
99 """
100 return d.getVar(var,exp)
101 88
102 89
103def renameVar(key, newkey, d): 90def renameVar(key, newkey, d):
104 """Renames a variable from key to newkey 91 """Renames a variable from key to newkey"""
105
106 Example:
107 >>> d = init()
108 >>> setVar('TEST', 'testcontents', d)
109 >>> renameVar('TEST', 'TEST2', d)
110 >>> print getVar('TEST2', d)
111 testcontents
112 """
113 d.renameVar(key, newkey) 92 d.renameVar(key, newkey)
114 93
115def delVar(var, d): 94def delVar(var, d):
116 """Removes a variable from the data set 95 """Removes a variable from the data set"""
117
118 Example:
119 >>> d = init()
120 >>> setVar('TEST', 'testcontents', d)
121 >>> print getVar('TEST', d)
122 testcontents
123 >>> delVar('TEST', d)
124 >>> print getVar('TEST', d)
125 None
126 """
127 d.delVar(var) 96 d.delVar(var)
128 97
129def setVarFlag(var, flag, flagvalue, d): 98def setVarFlag(var, flag, flagvalue, d):
130 """Set a flag for a given variable to a given value 99 """Set a flag for a given variable to a given value"""
131 100 d.setVarFlag(var, flag, flagvalue)
132 Example:
133 >>> d = init()
134 >>> setVarFlag('TEST', 'python', 1, d)
135 >>> print getVarFlag('TEST', 'python', d)
136 1
137 """
138 d.setVarFlag(var,flag,flagvalue)
139 101
140def getVarFlag(var, flag, d): 102def getVarFlag(var, flag, d):
141 """Gets given flag from given var 103 """Gets given flag from given var"""
142 104 return d.getVarFlag(var, flag)
143 Example:
144 >>> d = init()
145 >>> setVarFlag('TEST', 'python', 1, d)
146 >>> print getVarFlag('TEST', 'python', d)
147 1
148 """
149 return d.getVarFlag(var,flag)
150 105
151def delVarFlag(var, flag, d): 106def delVarFlag(var, flag, d):
152 """Removes a given flag from the variable's flags 107 """Removes a given flag from the variable's flags"""
153 108 d.delVarFlag(var, flag)
154 Example:
155 >>> d = init()
156 >>> setVarFlag('TEST', 'testflag', 1, d)
157 >>> print getVarFlag('TEST', 'testflag', d)
158 1
159 >>> delVarFlag('TEST', 'testflag', d)
160 >>> print getVarFlag('TEST', 'testflag', d)
161 None
162
163 """
164 d.delVarFlag(var,flag)
165 109
166def setVarFlags(var, flags, d): 110def setVarFlags(var, flags, d):
167 """Set the flags for a given variable 111 """Set the flags for a given variable
@@ -170,115 +114,27 @@ def setVarFlags(var, flags, d):
170 setVarFlags will not clear previous 114 setVarFlags will not clear previous
171 flags. Think of this method as 115 flags. Think of this method as
172 addVarFlags 116 addVarFlags
173
174 Example:
175 >>> d = init()
176 >>> myflags = {}
177 >>> myflags['test'] = 'blah'
178 >>> setVarFlags('TEST', myflags, d)
179 >>> print getVarFlag('TEST', 'test', d)
180 blah
181 """ 117 """
182 d.setVarFlags(var,flags) 118 d.setVarFlags(var, flags)
183 119
184def getVarFlags(var, d): 120def getVarFlags(var, d):
185 """Gets a variable's flags 121 """Gets a variable's flags"""
186
187 Example:
188 >>> d = init()
189 >>> setVarFlag('TEST', 'test', 'blah', d)
190 >>> print getVarFlags('TEST', d)['test']
191 blah
192 """
193 return d.getVarFlags(var) 122 return d.getVarFlags(var)
194 123
195def delVarFlags(var, d): 124def delVarFlags(var, d):
196 """Removes a variable's flags 125 """Removes a variable's flags"""
197
198 Example:
199 >>> data = init()
200 >>> setVarFlag('TEST', 'testflag', 1, data)
201 >>> print getVarFlag('TEST', 'testflag', data)
202 1
203 >>> delVarFlags('TEST', data)
204 >>> print getVarFlags('TEST', data)
205 None
206
207 """
208 d.delVarFlags(var) 126 d.delVarFlags(var)
209 127
210def keys(d): 128def keys(d):
211 """Return a list of keys in d 129 """Return a list of keys in d"""
212
213 Example:
214 >>> d = init()
215 >>> setVar('TEST', 1, d)
216 >>> setVar('MOO' , 2, d)
217 >>> setVarFlag('TEST', 'test', 1, d)
218 >>> keys(d)
219 ['TEST', 'MOO']
220 """
221 return d.keys() 130 return d.keys()
222 131
223def getData(d):
224 """Returns the data object used"""
225 return d
226
227def setData(newData, d):
228 """Sets the data object to the supplied value"""
229 d = newData
230
231
232##
233## Cookie Monsters' query functions
234##
235def _get_override_vars(d, override):
236 """
237 Internal!!!
238
239 Get the Names of Variables that have a specific
240 override. This function returns a iterable
241 Set or an empty list
242 """
243 return []
244
245def _get_var_flags_triple(d):
246 """
247 Internal!!!
248
249 """
250 return []
251 132
252__expand_var_regexp__ = re.compile(r"\${[^{}]+}") 133__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
253__expand_python_regexp__ = re.compile(r"\${@.+?}") 134__expand_python_regexp__ = re.compile(r"\${@.+?}")
254 135
255def expand(s, d, varname = None): 136def expand(s, d, varname = None):
256 """Variable expansion using the data store. 137 """Variable expansion using the data store"""
257
258 Example:
259 Standard expansion:
260 >>> d = init()
261 >>> setVar('A', 'sshd', d)
262 >>> print expand('/usr/bin/${A}', d)
263 /usr/bin/sshd
264
265 Python expansion:
266 >>> d = init()
267 >>> print expand('result: ${@37 * 72}', d)
268 result: 2664
269
270 Shell expansion:
271 >>> d = init()
272 >>> print expand('${TARGET_MOO}', d)
273 ${TARGET_MOO}
274 >>> setVar('TARGET_MOO', 'yupp', d)
275 >>> print expand('${TARGET_MOO}',d)
276 yupp
277 >>> setVar('SRC_URI', 'http://somebug.${TARGET_MOO}', d)
278 >>> delVar('TARGET_MOO', d)
279 >>> print expand('${SRC_URI}', d)
280 http://somebug.${TARGET_MOO}
281 """
282 return d.expand(s, varname) 138 return d.expand(s, varname)
283 139
284def expandKeys(alterdata, readdata = None): 140def expandKeys(alterdata, readdata = None):
@@ -295,38 +151,13 @@ def expandKeys(alterdata, readdata = None):
295 continue 151 continue
296 todolist[key] = ekey 152 todolist[key] = ekey
297 153
298 # These two for loops are split for performance to maximise the 154 # These two for loops are split for performance to maximise the
299 # usefulness of the expand cache 155 # usefulness of the expand cache
300 156
301 for key in todolist: 157 for key in todolist:
302 ekey = todolist[key] 158 ekey = todolist[key]
303 renameVar(key, ekey, alterdata) 159 renameVar(key, ekey, alterdata)
304 160
305def expandData(alterdata, readdata = None):
306 """For each variable in alterdata, expand it, and update the var contents.
307 Replacements use data from readdata.
308
309 Example:
310 >>> a=init()
311 >>> b=init()
312 >>> setVar("dlmsg", "dl_dir is ${DL_DIR}", a)
313 >>> setVar("DL_DIR", "/path/to/whatever", b)
314 >>> expandData(a, b)
315 >>> print getVar("dlmsg", a)
316 dl_dir is /path/to/whatever
317 """
318 if readdata == None:
319 readdata = alterdata
320
321 for key in keys(alterdata):
322 val = getVar(key, alterdata)
323 if type(val) is not types.StringType:
324 continue
325 expanded = expand(val, readdata)
326# print "key is %s, val is %s, expanded is %s" % (key, val, expanded)
327 if val != expanded:
328 setVar(key, expanded, alterdata)
329
330def inheritFromOS(d): 161def inheritFromOS(d):
331 """Inherit variables from the environment.""" 162 """Inherit variables from the environment."""
332 for s in os.environ.keys(): 163 for s in os.environ.keys():
@@ -351,21 +182,15 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
351 if all: 182 if all:
352 oval = getVar(var, d, 0) 183 oval = getVar(var, d, 0)
353 val = getVar(var, d, 1) 184 val = getVar(var, d, 1)
354 except KeyboardInterrupt: 185 except (KeyboardInterrupt, bb.build.FuncFailed):
355 raise 186 raise
356 except: 187 except Exception, exc:
357 excname = str(sys.exc_info()[0]) 188 o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
358 if excname == "bb.build.FuncFailed":
359 raise
360 o.write('# expansion of %s threw %s\n' % (var, excname))
361 return 0 189 return 0
362 190
363 if all: 191 if all:
364 o.write('# %s=%s\n' % (var, oval)) 192 o.write('# %s=%s\n' % (var, oval))
365 193
366 if type(val) is not types.StringType:
367 return 0
368
369 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: 194 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
370 return 0 195 return 0
371 196
@@ -375,10 +200,11 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
375 o.write('unset %s\n' % varExpanded) 200 o.write('unset %s\n' % varExpanded)
376 return 1 201 return 1
377 202
378 val.rstrip()
379 if not val: 203 if not val:
380 return 0 204 return 0
381 205
206 val = str(val)
207
382 if func: 208 if func:
383 # NOTE: should probably check for unbalanced {} within the var 209 # NOTE: should probably check for unbalanced {} within the var
384 o.write("%s() {\n%s\n}\n" % (varExpanded, val)) 210 o.write("%s() {\n%s\n}\n" % (varExpanded, val))
@@ -393,173 +219,22 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
393 o.write('%s="%s"\n' % (varExpanded, alter)) 219 o.write('%s="%s"\n' % (varExpanded, alter))
394 return 1 220 return 1
395 221
396
397def emit_env(o=sys.__stdout__, d = init(), all=False): 222def emit_env(o=sys.__stdout__, d = init(), all=False):
398 """Emits all items in the data store in a format such that it can be sourced by a shell.""" 223 """Emits all items in the data store in a format such that it can be sourced by a shell."""
399 224
400 env = keys(d) 225 isfunc = lambda key: bool(d.getVarFlag(key, "func"))
401 226 keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
402 for e in env: 227 grouped = groupby(keys, isfunc)
403 if getVarFlag(e, "func", d): 228 for isfunc, keys in grouped:
404 continue 229 for key in keys:
405 emit_var(e, o, d, all) and o.write('\n') 230 emit_var(key, o, d, all and not isfunc) and o.write('\n')
406
407 for e in env:
408 if not getVarFlag(e, "func", d):
409 continue
410 emit_var(e, o, d) and o.write('\n')
411 231
412def update_data(d): 232def update_data(d):
413 """Modifies the environment vars according to local overrides and commands. 233 """Performs final steps upon the datastore, including application of overrides"""
414 Examples: 234 d.finalize()
415 Appending to a variable:
416 >>> d = init()
417 >>> setVar('TEST', 'this is a', d)
418 >>> setVar('TEST_append', ' test', d)
419 >>> setVar('TEST_append', ' of the emergency broadcast system.', d)
420 >>> update_data(d)
421 >>> print getVar('TEST', d)
422 this is a test of the emergency broadcast system.
423
424 Prepending to a variable:
425 >>> setVar('TEST', 'virtual/libc', d)
426 >>> setVar('TEST_prepend', 'virtual/tmake ', d)
427 >>> setVar('TEST_prepend', 'virtual/patcher ', d)
428 >>> update_data(d)
429 >>> print getVar('TEST', d)
430 virtual/patcher virtual/tmake virtual/libc
431
432 Overrides:
433 >>> setVar('TEST_arm', 'target', d)
434 >>> setVar('TEST_ramses', 'machine', d)
435 >>> setVar('TEST_local', 'local', d)
436 >>> setVar('OVERRIDES', 'arm', d)
437
438 >>> setVar('TEST', 'original', d)
439 >>> update_data(d)
440 >>> print getVar('TEST', d)
441 target
442
443 >>> setVar('OVERRIDES', 'arm:ramses:local', d)
444 >>> setVar('TEST', 'original', d)
445 >>> update_data(d)
446 >>> print getVar('TEST', d)
447 local
448
449 CopyMonster:
450 >>> e = d.createCopy()
451 >>> setVar('TEST_foo', 'foo', e)
452 >>> update_data(e)
453 >>> print getVar('TEST', e)
454 local
455
456 >>> setVar('OVERRIDES', 'arm:ramses:local:foo', e)
457 >>> update_data(e)
458 >>> print getVar('TEST', e)
459 foo
460
461 >>> f = d.createCopy()
462 >>> setVar('TEST_moo', 'something', f)
463 >>> setVar('OVERRIDES', 'moo:arm:ramses:local:foo', e)
464 >>> update_data(e)
465 >>> print getVar('TEST', e)
466 foo
467
468
469 >>> h = init()
470 >>> setVar('SRC_URI', 'file://append.foo;patch=1 ', h)
471 >>> g = h.createCopy()
472 >>> setVar('SRC_URI_append_arm', 'file://other.foo;patch=1', g)
473 >>> setVar('OVERRIDES', 'arm:moo', g)
474 >>> update_data(g)
475 >>> print getVar('SRC_URI', g)
476 file://append.foo;patch=1 file://other.foo;patch=1
477
478 """
479 bb.msg.debug(2, bb.msg.domain.Data, "update_data()")
480
481 # now ask the cookie monster for help
482 #print "Cookie Monster"
483 #print "Append/Prepend %s" % d._special_values
484 #print "Overrides %s" % d._seen_overrides
485
486 overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or []
487
488 #
489 # Well let us see what breaks here. We used to iterate
490 # over each variable and apply the override and then
491 # do the line expanding.
492 # If we have bad luck - which we will have - the keys
493 # where in some order that is so important for this
494 # method which we don't have anymore.
495 # Anyway we will fix that and write test cases this
496 # time.
497
498 #
499 # First we apply all overrides
500 # Then we will handle _append and _prepend
501 #
502
503 for o in overrides:
504 # calculate '_'+override
505 l = len(o)+1
506
507 # see if one should even try
508 if not d._seen_overrides.has_key(o):
509 continue
510
511 vars = d._seen_overrides[o]
512 for var in vars:
513 name = var[:-l]
514 try:
515 d[name] = d[var]
516 except:
517 bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
518
519 # now on to the appends and prepends
520 if d._special_values.has_key('_append'):
521 appends = d._special_values['_append'] or []
522 for append in appends:
523 for (a, o) in getVarFlag(append, '_append', d) or []:
524 # maybe the OVERRIDE was not yet added so keep the append
525 if (o and o in overrides) or not o:
526 delVarFlag(append, '_append', d)
527 if o and not o in overrides:
528 continue
529
530 sval = getVar(append,d) or ""
531 sval+=a
532 setVar(append, sval, d)
533
534
535 if d._special_values.has_key('_prepend'):
536 prepends = d._special_values['_prepend'] or []
537
538 for prepend in prepends:
539 for (a, o) in getVarFlag(prepend, '_prepend', d) or []:
540 # maybe the OVERRIDE was not yet added so keep the prepend
541 if (o and o in overrides) or not o:
542 delVarFlag(prepend, '_prepend', d)
543 if o and not o in overrides:
544 continue
545
546 sval = a + (getVar(prepend,d) or "")
547 setVar(prepend, sval, d)
548
549 235
550def inherits_class(klass, d): 236def inherits_class(klass, d):
551 val = getVar('__inherit_cache', d) or [] 237 val = getVar('__inherit_cache', d) or []
552 if os.path.join('classes', '%s.bbclass' % klass) in val: 238 if os.path.join('classes', '%s.bbclass' % klass) in val:
553 return True 239 return True
554 return False 240 return False
555
556def _test():
557 """Start a doctest run on this module"""
558 import doctest
559 import bb
560 from bb import data
561 bb.msg.set_debug_level(0)
562 doctest.testmod(data)
563
564if __name__ == "__main__":
565 _test()
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 9067d54bfa..01a3330245 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -28,22 +28,16 @@ BitBake build tools.
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29# Based on functions from the base bb module, Copyright 2003 Holger Schurig 29# Based on functions from the base bb module, Copyright 2003 Holger Schurig
30 30
31import copy, os, re, sys, time, types 31import copy, re, sys
32import bb 32import bb
33from bb import utils, methodpool 33from bb import utils
34from COW import COWDictBase 34from bb.COW import COWDictBase
35from new import classobj
36 35
37 36
38__setvar_keyword__ = ["_append","_prepend"] 37__setvar_keyword__ = ["_append", "_prepend"]
39__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?') 38__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
40__expand_var_regexp__ = re.compile(r"\${[^{}]+}") 39__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
41__expand_python_regexp__ = re.compile(r"\${@.+?}") 40__expand_python_regexp__ = re.compile(r"\${@.+?}")
42_expand_globals = {
43 "os": os,
44 "bb": bb,
45 "time": time,
46}
47 41
48 42
49class DataSmart: 43class DataSmart:
@@ -55,9 +49,8 @@ class DataSmart:
55 self._seen_overrides = seen 49 self._seen_overrides = seen
56 50
57 self.expand_cache = {} 51 self.expand_cache = {}
58 self.expand_locals = {"d": self}
59 52
60 def expand(self,s, varname): 53 def expand(self, s, varname):
61 def var_sub(match): 54 def var_sub(match):
62 key = match.group()[2:-1] 55 key = match.group()[2:-1]
63 if varname and key: 56 if varname and key:
@@ -70,13 +63,12 @@ class DataSmart:
70 return match.group() 63 return match.group()
71 64
72 def python_sub(match): 65 def python_sub(match):
73 import bb
74 code = match.group()[3:-1] 66 code = match.group()[3:-1]
75 s = eval(code, _expand_globals, self.expand_locals) 67 codeobj = compile(code.strip(), varname or "<expansion>", "eval")
76 if type(s) == types.IntType: s = str(s) 68 value = utils.better_eval(codeobj, {"d": self})
77 return s 69 return str(value)
78 70
79 if type(s) is not types.StringType: # sanity check 71 if not isinstance(s, basestring): # sanity check
80 return s 72 return s
81 73
82 if varname and varname in self.expand_cache: 74 if varname and varname in self.expand_cache:
@@ -87,9 +79,8 @@ class DataSmart:
87 try: 79 try:
88 s = __expand_var_regexp__.sub(var_sub, s) 80 s = __expand_var_regexp__.sub(var_sub, s)
89 s = __expand_python_regexp__.sub(python_sub, s) 81 s = __expand_python_regexp__.sub(python_sub, s)
90 if s == olds: break 82 if s == olds:
91 if type(s) is not types.StringType: # sanity check 83 break
92 bb.msg.error(bb.msg.domain.Data, 'expansion of %s returned non-string %s' % (olds, s))
93 except KeyboardInterrupt: 84 except KeyboardInterrupt:
94 raise 85 raise
95 except: 86 except:
@@ -101,23 +92,86 @@ class DataSmart:
101 92
102 return s 93 return s
103 94
95 def finalize(self):
96 """Performs final steps upon the datastore, including application of overrides"""
97
98 overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
99
100 #
101 # Well let us see what breaks here. We used to iterate
102 # over each variable and apply the override and then
103 # do the line expanding.
104 # If we have bad luck - which we will have - the keys
105 # where in some order that is so important for this
106 # method which we don't have anymore.
107 # Anyway we will fix that and write test cases this
108 # time.
109
110 #
111 # First we apply all overrides
112 # Then we will handle _append and _prepend
113 #
114
115 for o in overrides:
116 # calculate '_'+override
117 l = len(o) + 1
118
119 # see if one should even try
120 if o not in self._seen_overrides:
121 continue
122
123 vars = self._seen_overrides[o]
124 for var in vars:
125 name = var[:-l]
126 try:
127 self[name] = self[var]
128 except Exception:
129 bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
130
131 # now on to the appends and prepends
132 if "_append" in self._special_values:
133 appends = self._special_values["_append"] or []
134 for append in appends:
135 for (a, o) in self.getVarFlag(append, "_append") or []:
136 # maybe the OVERRIDE was not yet added so keep the append
137 if (o and o in overrides) or not o:
138 self.delVarFlag(append, "_append")
139 if o and not o in overrides:
140 continue
141
142 sval = self.getVar(append, False) or ""
143 sval += a
144 self.setVar(append, sval)
145
146
147 if "_prepend" in self._special_values:
148 prepends = self._special_values["_prepend"] or []
149
150 for prepend in prepends:
151 for (a, o) in self.getVarFlag(prepend, "_prepend") or []:
152 # maybe the OVERRIDE was not yet added so keep the prepend
153 if (o and o in overrides) or not o:
154 self.delVarFlag(prepend, "_prepend")
155 if o and not o in overrides:
156 continue
157
158 sval = a + (self.getVar(prepend, False) or "")
159 self.setVar(prepend, sval)
160
104 def initVar(self, var): 161 def initVar(self, var):
105 self.expand_cache = {} 162 self.expand_cache = {}
106 if not var in self.dict: 163 if not var in self.dict:
107 self.dict[var] = {} 164 self.dict[var] = {}
108 165
109 def _findVar(self,var): 166 def _findVar(self, var):
110 _dest = self.dict 167 dest = self.dict
168 while dest:
169 if var in dest:
170 return dest[var]
111 171
112 while (_dest and var not in _dest): 172 if "_data" not in dest:
113 if not "_data" in _dest:
114 _dest = None
115 break 173 break
116 _dest = _dest["_data"] 174 dest = dest["_data"]
117
118 if _dest and var in _dest:
119 return _dest[var]
120 return None
121 175
122 def _makeShadowCopy(self, var): 176 def _makeShadowCopy(self, var):
123 if var in self.dict: 177 if var in self.dict:
@@ -130,7 +184,7 @@ class DataSmart:
130 else: 184 else:
131 self.initVar(var) 185 self.initVar(var)
132 186
133 def setVar(self,var,value): 187 def setVar(self, var, value):
134 self.expand_cache = {} 188 self.expand_cache = {}
135 match = __setvar_regexp__.match(var) 189 match = __setvar_regexp__.match(var)
136 if match and match.group("keyword") in __setvar_keyword__: 190 if match and match.group("keyword") in __setvar_keyword__:
@@ -145,7 +199,7 @@ class DataSmart:
145 # pay the cookie monster 199 # pay the cookie monster
146 try: 200 try:
147 self._special_values[keyword].add( base ) 201 self._special_values[keyword].add( base )
148 except: 202 except KeyError:
149 self._special_values[keyword] = set() 203 self._special_values[keyword] = set()
150 self._special_values[keyword].add( base ) 204 self._special_values[keyword].add( base )
151 205
@@ -157,23 +211,23 @@ class DataSmart:
157 # more cookies for the cookie monster 211 # more cookies for the cookie monster
158 if '_' in var: 212 if '_' in var:
159 override = var[var.rfind('_')+1:] 213 override = var[var.rfind('_')+1:]
160 if not self._seen_overrides.has_key(override): 214 if override not in self._seen_overrides:
161 self._seen_overrides[override] = set() 215 self._seen_overrides[override] = set()
162 self._seen_overrides[override].add( var ) 216 self._seen_overrides[override].add( var )
163 217
164 # setting var 218 # setting var
165 self.dict[var]["content"] = value 219 self.dict[var]["content"] = value
166 220
167 def getVar(self,var,exp): 221 def getVar(self, var, exp):
168 value = self.getVarFlag(var,"content") 222 value = self.getVarFlag(var, "content")
169 223
170 if exp and value: 224 if exp and value:
171 return self.expand(value,var) 225 return self.expand(value, var)
172 return value 226 return value
173 227
174 def renameVar(self, key, newkey): 228 def renameVar(self, key, newkey):
175 """ 229 """
176 Rename the variable key to newkey 230 Rename the variable key to newkey
177 """ 231 """
178 val = self.getVar(key, 0) 232 val = self.getVar(key, 0)
179 if val is not None: 233 if val is not None:
@@ -187,30 +241,30 @@ class DataSmart:
187 dest = self.getVarFlag(newkey, i) or [] 241 dest = self.getVarFlag(newkey, i) or []
188 dest.extend(src) 242 dest.extend(src)
189 self.setVarFlag(newkey, i, dest) 243 self.setVarFlag(newkey, i, dest)
190 244
191 if self._special_values.has_key(i) and key in self._special_values[i]: 245 if i in self._special_values and key in self._special_values[i]:
192 self._special_values[i].remove(key) 246 self._special_values[i].remove(key)
193 self._special_values[i].add(newkey) 247 self._special_values[i].add(newkey)
194 248
195 self.delVar(key) 249 self.delVar(key)
196 250
197 def delVar(self,var): 251 def delVar(self, var):
198 self.expand_cache = {} 252 self.expand_cache = {}
199 self.dict[var] = {} 253 self.dict[var] = {}
200 254
201 def setVarFlag(self,var,flag,flagvalue): 255 def setVarFlag(self, var, flag, flagvalue):
202 if not var in self.dict: 256 if not var in self.dict:
203 self._makeShadowCopy(var) 257 self._makeShadowCopy(var)
204 self.dict[var][flag] = flagvalue 258 self.dict[var][flag] = flagvalue
205 259
206 def getVarFlag(self,var,flag): 260 def getVarFlag(self, var, flag):
207 local_var = self._findVar(var) 261 local_var = self._findVar(var)
208 if local_var: 262 if local_var:
209 if flag in local_var: 263 if flag in local_var:
210 return copy.copy(local_var[flag]) 264 return copy.copy(local_var[flag])
211 return None 265 return None
212 266
213 def delVarFlag(self,var,flag): 267 def delVarFlag(self, var, flag):
214 local_var = self._findVar(var) 268 local_var = self._findVar(var)
215 if not local_var: 269 if not local_var:
216 return 270 return
@@ -220,7 +274,7 @@ class DataSmart:
220 if var in self.dict and flag in self.dict[var]: 274 if var in self.dict and flag in self.dict[var]:
221 del self.dict[var][flag] 275 del self.dict[var][flag]
222 276
223 def setVarFlags(self,var,flags): 277 def setVarFlags(self, var, flags):
224 if not var in self.dict: 278 if not var in self.dict:
225 self._makeShadowCopy(var) 279 self._makeShadowCopy(var)
226 280
@@ -229,7 +283,7 @@ class DataSmart:
229 continue 283 continue
230 self.dict[var][i] = flags[i] 284 self.dict[var][i] = flags[i]
231 285
232 def getVarFlags(self,var): 286 def getVarFlags(self, var):
233 local_var = self._findVar(var) 287 local_var = self._findVar(var)
234 flags = {} 288 flags = {}
235 289
@@ -244,7 +298,7 @@ class DataSmart:
244 return flags 298 return flags
245 299
246 300
247 def delVarFlags(self,var): 301 def delVarFlags(self, var):
248 if not var in self.dict: 302 if not var in self.dict:
249 self._makeShadowCopy(var) 303 self._makeShadowCopy(var)
250 304
@@ -274,21 +328,19 @@ class DataSmart:
274 def keys(self): 328 def keys(self):
275 def _keys(d, mykey): 329 def _keys(d, mykey):
276 if "_data" in d: 330 if "_data" in d:
277 _keys(d["_data"],mykey) 331 _keys(d["_data"], mykey)
278 332
279 for key in d.keys(): 333 for key in d.keys():
280 if key != "_data": 334 if key != "_data":
281 mykey[key] = None 335 mykey[key] = None
282 keytab = {} 336 keytab = {}
283 _keys(self.dict,keytab) 337 _keys(self.dict, keytab)
284 return keytab.keys() 338 return keytab.keys()
285 339
286 def __getitem__(self,item): 340 def __getitem__(self, item):
287 #print "Warning deprecated" 341 #print "Warning deprecated"
288 return self.getVar(item, False) 342 return self.getVar(item, False)
289 343
290 def __setitem__(self,var,data): 344 def __setitem__(self, var, data):
291 #print "Warning deprecated" 345 #print "Warning deprecated"
292 self.setVar(var,data) 346 self.setVar(var, data)
293
294
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index afd5bf57c1..7731649eff 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -22,7 +22,8 @@ BitBake build tools.
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import os, re, sys 25import os, sys
26import warnings
26import bb.utils 27import bb.utils
27import pickle 28import pickle
28 29
@@ -38,7 +39,7 @@ class Event:
38 self.pid = worker_pid 39 self.pid = worker_pid
39 40
40NotHandled = 0 41NotHandled = 0
41Handled = 1 42Handled = 1
42 43
43Registered = 10 44Registered = 10
44AlreadyRegistered = 14 45AlreadyRegistered = 14
@@ -48,13 +49,25 @@ _handlers = {}
48_ui_handlers = {} 49_ui_handlers = {}
49_ui_handler_seq = 0 50_ui_handler_seq = 0
50 51
52# For compatibility
53bb.utils._context["NotHandled"] = NotHandled
54bb.utils._context["Handled"] = Handled
55
51def fire_class_handlers(event, d): 56def fire_class_handlers(event, d):
57 import bb.msg
58 if isinstance(event, bb.msg.MsgBase):
59 return
60
52 for handler in _handlers: 61 for handler in _handlers:
53 h = _handlers[handler] 62 h = _handlers[handler]
54 event.data = d 63 event.data = d
55 if type(h).__name__ == "code": 64 if type(h).__name__ == "code":
56 exec(h) 65 locals = {"e": event}
57 tmpHandler(event) 66 bb.utils.simple_exec(h, locals)
67 ret = bb.utils.better_eval("tmpHandler(e)", locals)
68 if ret is not None:
69 warnings.warn("Using Handled/NotHandled in event handlers is deprecated",
70 DeprecationWarning, stacklevel = 2)
58 else: 71 else:
59 h(event) 72 h(event)
60 del event.data 73 del event.data
@@ -76,9 +89,9 @@ def fire_ui_handlers(event, d):
76def fire(event, d): 89def fire(event, d):
77 """Fire off an Event""" 90 """Fire off an Event"""
78 91
79 # We can fire class handlers in the worker process context and this is 92 # We can fire class handlers in the worker process context and this is
80 # desired so they get the task based datastore. 93 # desired so they get the task based datastore.
81 # UI handlers need to be fired in the server context so we defer this. They 94 # UI handlers need to be fired in the server context so we defer this. They
82 # don't have a datastore so the datastore context isn't a problem. 95 # don't have a datastore so the datastore context isn't a problem.
83 96
84 fire_class_handlers(event, d) 97 fire_class_handlers(event, d)
@@ -91,13 +104,13 @@ def worker_fire(event, d):
91 data = "<event>" + pickle.dumps(event) + "</event>" 104 data = "<event>" + pickle.dumps(event) + "</event>"
92 try: 105 try:
93 if os.write(worker_pipe, data) != len (data): 106 if os.write(worker_pipe, data) != len (data):
94 print "Error sending event to server (short write)" 107 print("Error sending event to server (short write)")
95 except OSError: 108 except OSError:
96 sys.exit(1) 109 sys.exit(1)
97 110
98def fire_from_worker(event, d): 111def fire_from_worker(event, d):
99 if not event.startswith("<event>") or not event.endswith("</event>"): 112 if not event.startswith("<event>") or not event.endswith("</event>"):
100 print "Error, not an event" 113 print("Error, not an event")
101 return 114 return
102 event = pickle.loads(event[7:-8]) 115 event = pickle.loads(event[7:-8])
103 fire_ui_handlers(event, d) 116 fire_ui_handlers(event, d)
@@ -222,10 +235,11 @@ class BuildCompleted(BuildBase):
222class NoProvider(Event): 235class NoProvider(Event):
223 """No Provider for an Event""" 236 """No Provider for an Event"""
224 237
225 def __init__(self, item, runtime=False): 238 def __init__(self, item, runtime=False, dependees=None):
226 Event.__init__(self) 239 Event.__init__(self)
227 self._item = item 240 self._item = item
228 self._runtime = runtime 241 self._runtime = runtime
242 self._dependees = dependees
229 243
230 def getItem(self): 244 def getItem(self):
231 return self._item 245 return self._item
@@ -284,4 +298,3 @@ class DepTreeGenerated(Event):
284 def __init__(self, depgraph): 298 def __init__(self, depgraph):
285 Event.__init__(self) 299 Event.__init__(self)
286 self._depgraph = depgraph 300 self._depgraph = depgraph
287
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index b566da4311..31b9653793 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -24,6 +24,8 @@ BitBake build tools.
24# 24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig 25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26 26
27from __future__ import absolute_import
28from __future__ import print_function
27import os, re 29import os, re
28import bb 30import bb
29from bb import data 31from bb import data
@@ -53,24 +55,6 @@ class InvalidSRCREV(Exception):
53def decodeurl(url): 55def decodeurl(url):
54 """Decodes an URL into the tokens (scheme, network location, path, 56 """Decodes an URL into the tokens (scheme, network location, path,
55 user, password, parameters). 57 user, password, parameters).
56
57 >>> decodeurl("http://www.google.com/index.html")
58 ('http', 'www.google.com', '/index.html', '', '', {})
59
60 >>> decodeurl("file://gas/COPYING")
61 ('file', '', 'gas/COPYING', '', '', {})
62
63 CVS url with username, host and cvsroot. The cvs module to check out is in the
64 parameters:
65
66 >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg")
67 ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'})
68
69 Dito, but this time the username has a password part. And we also request a special tag
70 to check out.
71
72 >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81")
73 ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
74 """ 58 """
75 59
76 m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) 60 m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
@@ -103,7 +87,7 @@ def decodeurl(url):
103 p = {} 87 p = {}
104 if parm: 88 if parm:
105 for s in parm.split(';'): 89 for s in parm.split(';'):
106 s1,s2 = s.split('=') 90 s1, s2 = s.split('=')
107 p[s1] = s2 91 p[s1] = s2
108 92
109 return (type, host, path, user, pswd, p) 93 return (type, host, path, user, pswd, p)
@@ -111,27 +95,12 @@ def decodeurl(url):
111def encodeurl(decoded): 95def encodeurl(decoded):
112 """Encodes a URL from tokens (scheme, network location, path, 96 """Encodes a URL from tokens (scheme, network location, path,
113 user, password, parameters). 97 user, password, parameters).
114
115 >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}])
116 'http://www.google.com/index.html'
117
118 CVS with username, host and cvsroot. The cvs module to check out is in the
119 parameters:
120
121 >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}])
122 'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg'
123
124 Dito, but this time the username has a password part. And we also request a special tag
125 to check out.
126
127 >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}])
128 'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg'
129 """ 98 """
130 99
131 (type, host, path, user, pswd, p) = decoded 100 (type, host, path, user, pswd, p) = decoded
132 101
133 if not type or not path: 102 if not type or not path:
134 bb.msg.fatal(bb.msg.domain.Fetcher, "invalid or missing parameters for url encoding") 103 raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
135 url = '%s://' % type 104 url = '%s://' % type
136 if user: 105 if user:
137 url += "%s" % user 106 url += "%s" % user
@@ -151,15 +120,14 @@ def uri_replace(uri, uri_find, uri_replace, d):
151# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri) 120# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
152 if not uri or not uri_find or not uri_replace: 121 if not uri or not uri_find or not uri_replace:
153 bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing") 122 bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
154 uri_decoded = list(bb.decodeurl(uri)) 123 uri_decoded = list(decodeurl(uri))
155 uri_find_decoded = list(bb.decodeurl(uri_find)) 124 uri_find_decoded = list(decodeurl(uri_find))
156 uri_replace_decoded = list(bb.decodeurl(uri_replace)) 125 uri_replace_decoded = list(decodeurl(uri_replace))
157 result_decoded = ['','','','','',{}] 126 result_decoded = ['', '', '', '', '', {}]
158 for i in uri_find_decoded: 127 for i in uri_find_decoded:
159 loc = uri_find_decoded.index(i) 128 loc = uri_find_decoded.index(i)
160 result_decoded[loc] = uri_decoded[loc] 129 result_decoded[loc] = uri_decoded[loc]
161 import types 130 if isinstance(i, basestring):
162 if type(i) == types.StringType:
163 if (re.match(i, uri_decoded[loc])): 131 if (re.match(i, uri_decoded[loc])):
164 result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) 132 result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
165 if uri_find_decoded.index(i) == 2: 133 if uri_find_decoded.index(i) == 2:
@@ -174,19 +142,20 @@ def uri_replace(uri, uri_find, uri_replace, d):
174# else: 142# else:
175# for j in i: 143# for j in i:
176# FIXME: apply replacements against options 144# FIXME: apply replacements against options
177 return bb.encodeurl(result_decoded) 145 return encodeurl(result_decoded)
178 146
179methods = [] 147methods = []
180urldata_cache = {} 148urldata_cache = {}
181saved_headrevs = {} 149saved_headrevs = {}
150persistent_database_connection = {}
182 151
183def fetcher_init(d): 152def fetcher_init(d):
184 """ 153 """
185 Called to initilize the fetchers once the configuration data is known 154 Called to initialize the fetchers once the configuration data is known.
186 Calls before this must not hit the cache. 155 Calls before this must not hit the cache.
187 """ 156 """
188 pd = persist_data.PersistData(d) 157 pd = persist_data.PersistData(d, persistent_database_connection)
189 # When to drop SCM head revisions controled by user policy 158 # When to drop SCM head revisions controlled by user policy
190 srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" 159 srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
191 if srcrev_policy == "cache": 160 if srcrev_policy == "cache":
192 bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy) 161 bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
@@ -198,7 +167,7 @@ def fetcher_init(d):
198 pass 167 pass
199 pd.delDomain("BB_URI_HEADREVS") 168 pd.delDomain("BB_URI_HEADREVS")
200 else: 169 else:
201 bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy) 170 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
202 171
203 for m in methods: 172 for m in methods:
204 if hasattr(m, "init"): 173 if hasattr(m, "init"):
@@ -214,7 +183,7 @@ def fetcher_compare_revisons(d):
214 return true/false on whether they've changed. 183 return true/false on whether they've changed.
215 """ 184 """
216 185
217 pd = persist_data.PersistData(d) 186 pd = persist_data.PersistData(d, persistent_database_connection)
218 data = pd.getKeyValues("BB_URI_HEADREVS") 187 data = pd.getKeyValues("BB_URI_HEADREVS")
219 data2 = bb.fetch.saved_headrevs 188 data2 = bb.fetch.saved_headrevs
220 189
@@ -236,6 +205,7 @@ def fetcher_compare_revisons(d):
236 205
237def init(urls, d, setup = True): 206def init(urls, d, setup = True):
238 urldata = {} 207 urldata = {}
208
239 fn = bb.data.getVar('FILE', d, 1) 209 fn = bb.data.getVar('FILE', d, 1)
240 if fn in urldata_cache: 210 if fn in urldata_cache:
241 urldata = urldata_cache[fn] 211 urldata = urldata_cache[fn]
@@ -247,7 +217,7 @@ def init(urls, d, setup = True):
247 if setup: 217 if setup:
248 for url in urldata: 218 for url in urldata:
249 if not urldata[url].setup: 219 if not urldata[url].setup:
250 urldata[url].setup_localpath(d) 220 urldata[url].setup_localpath(d)
251 221
252 urldata_cache[fn] = urldata 222 urldata_cache[fn] = urldata
253 return urldata 223 return urldata
@@ -265,7 +235,7 @@ def go(d, urls = None):
265 ud = urldata[u] 235 ud = urldata[u]
266 m = ud.method 236 m = ud.method
267 if ud.localfile: 237 if ud.localfile:
268 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): 238 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5) and os.path.exists(ud.localfile):
269 # File already present along with md5 stamp file 239 # File already present along with md5 stamp file
270 # Touch md5 file to show activity 240 # Touch md5 file to show activity
271 try: 241 try:
@@ -275,8 +245,8 @@ def go(d, urls = None):
275 pass 245 pass
276 continue 246 continue
277 lf = bb.utils.lockfile(ud.lockfile) 247 lf = bb.utils.lockfile(ud.lockfile)
278 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): 248 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5) and os.path.exists(ud.localfile):
279 # If someone else fetched this before we got the lock, 249 # If someone else fetched this before we got the lock,
280 # notice and don't try again 250 # notice and don't try again
281 try: 251 try:
282 os.utime(ud.md5, None) 252 os.utime(ud.md5, None)
@@ -332,7 +302,7 @@ def checkstatus(d):
332 ret = try_mirrors (d, u, mirrors, True) 302 ret = try_mirrors (d, u, mirrors, True)
333 303
334 if not ret: 304 if not ret:
335 bb.msg.error(bb.msg.domain.Fetcher, "URL %s doesn't work" % u) 305 raise FetchError("URL %s doesn't work" % u)
336 306
337def localpaths(d): 307def localpaths(d):
338 """ 308 """
@@ -342,7 +312,7 @@ def localpaths(d):
342 urldata = init([], d, True) 312 urldata = init([], d, True)
343 313
344 for u in urldata: 314 for u in urldata:
345 ud = urldata[u] 315 ud = urldata[u]
346 local.append(ud.localpath) 316 local.append(ud.localpath)
347 317
348 return local 318 return local
@@ -354,15 +324,15 @@ def get_srcrev(d):
354 Return the version string for the current package 324 Return the version string for the current package
355 (usually to be used as PV) 325 (usually to be used as PV)
356 Most packages usually only have one SCM so we just pass on the call. 326 Most packages usually only have one SCM so we just pass on the call.
357 In the multi SCM case, we build a value based on SRCREV_FORMAT which must 327 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
358 have been set. 328 have been set.
359 """ 329 """
360 330
361 # 331 #
362 # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which 332 # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
363 # could translate into a call to here. If it does, we need to catch this 333 # could translate into a call to here. If it does, we need to catch this
364 # and provide some way so it knows get_srcrev is active instead of being 334 # and provide some way so it knows get_srcrev is active instead of being
365 # some number etc. hence the srcrev_internal_call tracking and the magic 335 # some number etc. hence the srcrev_internal_call tracking and the magic
366 # "SRCREVINACTION" return value. 336 # "SRCREVINACTION" return value.
367 # 337 #
368 # Neater solutions welcome! 338 # Neater solutions welcome!
@@ -372,7 +342,7 @@ def get_srcrev(d):
372 342
373 scms = [] 343 scms = []
374 344
375 # Only call setup_localpath on URIs which suppports_srcrev() 345 # Only call setup_localpath on URIs which suppports_srcrev()
376 urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False) 346 urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
377 for u in urldata: 347 for u in urldata:
378 ud = urldata[u] 348 ud = urldata[u]
@@ -385,7 +355,7 @@ def get_srcrev(d):
385 bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI") 355 bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
386 raise ParameterError 356 raise ParameterError
387 357
388 bb.data.setVar('__BB_DONT_CACHE','1', d) 358 bb.data.setVar('__BB_DONT_CACHE', '1', d)
389 359
390 if len(scms) == 1: 360 if len(scms) == 1:
391 return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d) 361 return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
@@ -408,7 +378,7 @@ def get_srcrev(d):
408 378
409def localpath(url, d, cache = True): 379def localpath(url, d, cache = True):
410 """ 380 """
411 Called from the parser with cache=False since the cache isn't ready 381 Called from the parser with cache=False since the cache isn't ready
412 at this point. Also called from classed in OE e.g. patch.bbclass 382 at this point. Also called from classed in OE e.g. patch.bbclass
413 """ 383 """
414 ud = init([url], d) 384 ud = init([url], d)
@@ -432,7 +402,7 @@ def runfetchcmd(cmd, d, quiet = False):
432 for var in exportvars: 402 for var in exportvars:
433 val = data.getVar(var, d, True) 403 val = data.getVar(var, d, True)
434 if val: 404 if val:
435 cmd = 'export ' + var + '=%s; %s' % (val, cmd) 405 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
436 406
437 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd) 407 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
438 408
@@ -440,12 +410,12 @@ def runfetchcmd(cmd, d, quiet = False):
440 stdout_handle = os.popen(cmd + " 2>&1", "r") 410 stdout_handle = os.popen(cmd + " 2>&1", "r")
441 output = "" 411 output = ""
442 412
443 while 1: 413 while True:
444 line = stdout_handle.readline() 414 line = stdout_handle.readline()
445 if not line: 415 if not line:
446 break 416 break
447 if not quiet: 417 if not quiet:
448 print line, 418 print(line, end=' ')
449 output += line 419 output += line
450 420
451 status = stdout_handle.close() or 0 421 status = stdout_handle.close() or 0
@@ -507,7 +477,7 @@ class FetchData(object):
507 """ 477 """
508 def __init__(self, url, d): 478 def __init__(self, url, d):
509 self.localfile = "" 479 self.localfile = ""
510 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d)) 480 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
511 self.date = Fetch.getSRCDate(self, d) 481 self.date = Fetch.getSRCDate(self, d)
512 self.url = url 482 self.url = url
513 if not self.user and "user" in self.parm: 483 if not self.user and "user" in self.parm:
@@ -571,7 +541,7 @@ class Fetch(object):
571 def localpath(self, url, urldata, d): 541 def localpath(self, url, urldata, d):
572 """ 542 """
573 Return the local filename of a given url assuming a successful fetch. 543 Return the local filename of a given url assuming a successful fetch.
574 Can also setup variables in urldata for use in go (saving code duplication 544 Can also setup variables in urldata for use in go (saving code duplication
575 and duplicate code execution) 545 and duplicate code execution)
576 """ 546 """
577 return url 547 return url
@@ -632,8 +602,8 @@ class Fetch(object):
632 """ 602 """
633 Return: 603 Return:
634 a) a source revision if specified 604 a) a source revision if specified
635 b) True if auto srcrev is in action 605 b) True if auto srcrev is in action
636 c) False otherwise 606 c) False otherwise
637 """ 607 """
638 608
639 if 'rev' in ud.parm: 609 if 'rev' in ud.parm:
@@ -665,7 +635,7 @@ class Fetch(object):
665 b) None otherwise 635 b) None otherwise
666 """ 636 """
667 637
668 localcount= None 638 localcount = None
669 if 'name' in ud.parm: 639 if 'name' in ud.parm:
670 pn = data.getVar("PN", d, 1) 640 pn = data.getVar("PN", d, 1)
671 localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1) 641 localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
@@ -706,7 +676,7 @@ class Fetch(object):
706 if not hasattr(self, "_latest_revision"): 676 if not hasattr(self, "_latest_revision"):
707 raise ParameterError 677 raise ParameterError
708 678
709 pd = persist_data.PersistData(d) 679 pd = persist_data.PersistData(d, persistent_database_connection)
710 key = self.generate_revision_key(url, ud, d) 680 key = self.generate_revision_key(url, ud, d)
711 rev = pd.getValue("BB_URI_HEADREVS", key) 681 rev = pd.getValue("BB_URI_HEADREVS", key)
712 if rev != None: 682 if rev != None:
@@ -718,12 +688,12 @@ class Fetch(object):
718 688
719 def sortable_revision(self, url, ud, d): 689 def sortable_revision(self, url, ud, d):
720 """ 690 """
721 691
722 """ 692 """
723 if hasattr(self, "_sortable_revision"): 693 if hasattr(self, "_sortable_revision"):
724 return self._sortable_revision(url, ud, d) 694 return self._sortable_revision(url, ud, d)
725 695
726 pd = persist_data.PersistData(d) 696 pd = persist_data.PersistData(d, persistent_database_connection)
727 key = self.generate_revision_key(url, ud, d) 697 key = self.generate_revision_key(url, ud, d)
728 698
729 latest_rev = self._build_revision(url, ud, d) 699 latest_rev = self._build_revision(url, ud, d)
@@ -758,18 +728,18 @@ class Fetch(object):
758 key = self._revision_key(url, ud, d) 728 key = self._revision_key(url, ud, d)
759 return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "") 729 return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
760 730
761import cvs 731from . import cvs
762import git 732from . import git
763import local 733from . import local
764import svn 734from . import svn
765import wget 735from . import wget
766import svk 736from . import svk
767import ssh 737from . import ssh
768import perforce 738from . import perforce
769import bzr 739from . import bzr
770import hg 740from . import hg
771import osc 741from . import osc
772import repo 742from . import repo
773 743
774methods.append(local.Local()) 744methods.append(local.Local())
775methods.append(wget.Wget()) 745methods.append(wget.Wget())
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
index c6e33c3343..813d7d8c80 100644
--- a/bitbake/lib/bb/fetch/bzr.py
+++ b/bitbake/lib/bb/fetch/bzr.py
@@ -46,15 +46,15 @@ class Bzr(Fetch):
46 46
47 revision = Fetch.srcrev_internal_helper(ud, d) 47 revision = Fetch.srcrev_internal_helper(ud, d)
48 if revision is True: 48 if revision is True:
49 ud.revision = self.latest_revision(url, ud, d) 49 ud.revision = self.latest_revision(url, ud, d)
50 elif revision: 50 elif revision:
51 ud.revision = revision 51 ud.revision = revision
52 52
53 if not ud.revision: 53 if not ud.revision:
54 ud.revision = self.latest_revision(url, ud, d) 54 ud.revision = self.latest_revision(url, ud, d)
55 55
56 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) 56 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
57 57
58 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) 58 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
59 59
60 def _buildbzrcommand(self, ud, d, command): 60 def _buildbzrcommand(self, ud, d, command):
@@ -145,4 +145,3 @@ class Bzr(Fetch):
145 145
146 def _build_revision(self, url, ud, d): 146 def _build_revision(self, url, ud, d):
147 return ud.revision 147 return ud.revision
148
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 443f521317..61976f7ef4 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -139,8 +139,8 @@ class Cvs(Fetch):
139 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") 139 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
140 pkg = data.expand('${PN}', d) 140 pkg = data.expand('${PN}', d)
141 pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) 141 pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
142 moddir = os.path.join(pkgdir,localdir) 142 moddir = os.path.join(pkgdir, localdir)
143 if os.access(os.path.join(moddir,'CVS'), os.R_OK): 143 if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
144 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) 144 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
145 # update sources there 145 # update sources there
146 os.chdir(moddir) 146 os.chdir(moddir)
@@ -157,7 +157,7 @@ class Cvs(Fetch):
157 try: 157 try:
158 os.rmdir(moddir) 158 os.rmdir(moddir)
159 except OSError: 159 except OSError:
160 pass 160 pass
161 raise FetchError(ud.module) 161 raise FetchError(ud.module)
162 162
163 # tar them up to a defined filename 163 # tar them up to a defined filename
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 41ebc5b998..8c91de9db1 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -57,12 +57,12 @@ class Git(Fetch):
57 57
58 tag = Fetch.srcrev_internal_helper(ud, d) 58 tag = Fetch.srcrev_internal_helper(ud, d)
59 if tag is True: 59 if tag is True:
60 ud.tag = self.latest_revision(url, ud, d) 60 ud.tag = self.latest_revision(url, ud, d)
61 elif tag: 61 elif tag:
62 ud.tag = tag 62 ud.tag = tag
63 63
64 if not ud.tag or ud.tag == "master": 64 if not ud.tag or ud.tag == "master":
65 ud.tag = self.latest_revision(url, ud, d) 65 ud.tag = self.latest_revision(url, ud, d)
66 66
67 subdir = ud.parm.get("subpath", "") 67 subdir = ud.parm.get("subpath", "")
68 if subdir != "": 68 if subdir != "":
@@ -114,7 +114,7 @@ class Git(Fetch):
114 114
115 os.chdir(ud.clonedir) 115 os.chdir(ud.clonedir)
116 mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) 116 mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
117 if mirror_tarballs != "0" or 'fullclone' in ud.parm: 117 if mirror_tarballs != "0" or 'fullclone' in ud.parm:
118 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") 118 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
119 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) 119 runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
120 120
@@ -188,7 +188,7 @@ class Git(Fetch):
188 188
189 def _sortable_buildindex_disabled(self, url, ud, d, rev): 189 def _sortable_buildindex_disabled(self, url, ud, d, rev):
190 """ 190 """
191 Return a suitable buildindex for the revision specified. This is done by counting revisions 191 Return a suitable buildindex for the revision specified. This is done by counting revisions
192 using "git rev-list" which may or may not work in different circumstances. 192 using "git rev-list" which may or may not work in different circumstances.
193 """ 193 """
194 194
@@ -197,7 +197,7 @@ class Git(Fetch):
197 # Check if we have the rev already 197 # Check if we have the rev already
198 198
199 if not os.path.exists(ud.clonedir): 199 if not os.path.exists(ud.clonedir):
200 print "no repo" 200 print("no repo")
201 self.go(None, ud, d) 201 self.go(None, ud, d)
202 if not os.path.exists(ud.clonedir): 202 if not os.path.exists(ud.clonedir):
203 bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir)) 203 bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir))
@@ -213,5 +213,4 @@ class Git(Fetch):
213 213
214 buildindex = "%s" % output.split()[0] 214 buildindex = "%s" % output.split()[0]
215 bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev)) 215 bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev))
216 return buildindex 216 return buildindex
217
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
index d0756382f8..efb3b5c76d 100644
--- a/bitbake/lib/bb/fetch/hg.py
+++ b/bitbake/lib/bb/fetch/hg.py
@@ -134,9 +134,9 @@ class Hg(Fetch):
134 os.chdir(ud.pkgdir) 134 os.chdir(ud.pkgdir)
135 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) 135 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
136 runfetchcmd(fetchcmd, d) 136 runfetchcmd(fetchcmd, d)
137 137
138 # Even when we clone (fetch), we still need to update as hg's clone 138 # Even when we clone (fetch), we still need to update as hg's clone
139 # won't checkout the specified revision if its on a branch 139 # won't checkout the specified revision if its on a branch
140 updatecmd = self._buildhgcommand(ud, d, "update") 140 updatecmd = self._buildhgcommand(ud, d, "update")
141 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) 141 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
142 runfetchcmd(updatecmd, d) 142 runfetchcmd(updatecmd, d)
@@ -170,4 +170,3 @@ class Hg(Fetch):
170 Return a unique key for the url 170 Return a unique key for the url
171 """ 171 """
172 return "hg:" + ud.moddir 172 return "hg:" + ud.moddir
173
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index f9bdf589cb..882a2c4602 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -27,6 +27,7 @@ BitBake build tools.
27 27
28import os 28import os
29import bb 29import bb
30import bb.utils
30from bb import data 31from bb import data
31from bb.fetch import Fetch 32from bb.fetch import Fetch
32 33
@@ -47,7 +48,7 @@ class Local(Fetch):
47 if path[0] != "/": 48 if path[0] != "/":
48 filespath = data.getVar('FILESPATH', d, 1) 49 filespath = data.getVar('FILESPATH', d, 1)
49 if filespath: 50 if filespath:
50 newpath = bb.which(filespath, path) 51 newpath = bb.utils.which(filespath, path)
51 if not newpath: 52 if not newpath:
52 filesdir = data.getVar('FILESDIR', d, 1) 53 filesdir = data.getVar('FILESDIR', d, 1)
53 if filesdir: 54 if filesdir:
@@ -65,8 +66,8 @@ class Local(Fetch):
65 Check the status of the url 66 Check the status of the url
66 """ 67 """
67 if urldata.localpath.find("*") != -1: 68 if urldata.localpath.find("*") != -1:
68 bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url) 69 bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
69 return True 70 return True
70 if os.path.exists(urldata.localpath): 71 if os.path.exists(urldata.localpath):
71 return True 72 return True
72 return False 73 return False
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
index 548dd9d074..ed773939b0 100644
--- a/bitbake/lib/bb/fetch/osc.py
+++ b/bitbake/lib/bb/fetch/osc.py
@@ -16,7 +16,7 @@ from bb.fetch import MissingParameterError
16from bb.fetch import runfetchcmd 16from bb.fetch import runfetchcmd
17 17
18class Osc(Fetch): 18class Osc(Fetch):
19 """Class to fetch a module or modules from Opensuse build server 19 """Class to fetch a module or modules from Opensuse build server
20 repositories.""" 20 repositories."""
21 21
22 def supports(self, url, ud, d): 22 def supports(self, url, ud, d):
@@ -64,7 +64,7 @@ class Osc(Fetch):
64 proto = "ocs" 64 proto = "ocs"
65 if "proto" in ud.parm: 65 if "proto" in ud.parm:
66 proto = ud.parm["proto"] 66 proto = ud.parm["proto"]
67 67
68 options = [] 68 options = []
69 69
70 config = "-c %s" % self.generate_config(ud, d) 70 config = "-c %s" % self.generate_config(ud, d)
@@ -108,7 +108,7 @@ class Osc(Fetch):
108 os.chdir(ud.pkgdir) 108 os.chdir(ud.pkgdir)
109 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd) 109 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
110 runfetchcmd(oscfetchcmd, d) 110 runfetchcmd(oscfetchcmd, d)
111 111
112 os.chdir(os.path.join(ud.pkgdir + ud.path)) 112 os.chdir(os.path.join(ud.pkgdir + ud.path))
113 # tar them up to a defined filename 113 # tar them up to a defined filename
114 try: 114 try:
@@ -131,7 +131,7 @@ class Osc(Fetch):
131 131
132 config_path = "%s/oscrc" % data.expand('${OSCDIR}', d) 132 config_path = "%s/oscrc" % data.expand('${OSCDIR}', d)
133 if (os.path.exists(config_path)): 133 if (os.path.exists(config_path)):
134 os.remove(config_path) 134 os.remove(config_path)
135 135
136 f = open(config_path, 'w') 136 f = open(config_path, 'w')
137 f.write("[general]\n") 137 f.write("[general]\n")
@@ -146,5 +146,5 @@ class Osc(Fetch):
146 f.write("user = %s\n" % ud.parm["user"]) 146 f.write("user = %s\n" % ud.parm["user"])
147 f.write("pass = %s\n" % ud.parm["pswd"]) 147 f.write("pass = %s\n" % ud.parm["pswd"])
148 f.close() 148 f.close()
149 149
150 return config_path 150 return config_path
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
index e2c3421089..1c74cff349 100644
--- a/bitbake/lib/bb/fetch/perforce.py
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -25,6 +25,7 @@ BitBake build tools.
25# 25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig 26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27 27
28from future_builtins import zip
28import os 29import os
29import bb 30import bb
30from bb import data 31from bb import data
@@ -35,15 +36,15 @@ class Perforce(Fetch):
35 def supports(self, url, ud, d): 36 def supports(self, url, ud, d):
36 return ud.type in ['p4'] 37 return ud.type in ['p4']
37 38
38 def doparse(url,d): 39 def doparse(url, d):
39 parm = {} 40 parm = {}
40 path = url.split("://")[1] 41 path = url.split("://")[1]
41 delim = path.find("@"); 42 delim = path.find("@");
42 if delim != -1: 43 if delim != -1:
43 (user,pswd,host,port) = path.split('@')[0].split(":") 44 (user, pswd, host, port) = path.split('@')[0].split(":")
44 path = path.split('@')[1] 45 path = path.split('@')[1]
45 else: 46 else:
46 (host,port) = data.getVar('P4PORT', d).split(':') 47 (host, port) = data.getVar('P4PORT', d).split(':')
47 user = "" 48 user = ""
48 pswd = "" 49 pswd = ""
49 50
@@ -53,19 +54,19 @@ class Perforce(Fetch):
53 plist = path.split(';') 54 plist = path.split(';')
54 for item in plist: 55 for item in plist:
55 if item.count('='): 56 if item.count('='):
56 (key,value) = item.split('=') 57 (key, value) = item.split('=')
57 keys.append(key) 58 keys.append(key)
58 values.append(value) 59 values.append(value)
59 60
60 parm = dict(zip(keys,values)) 61 parm = dict(zip(keys, values))
61 path = "//" + path.split(';')[0] 62 path = "//" + path.split(';')[0]
62 host += ":%s" % (port) 63 host += ":%s" % (port)
63 parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) 64 parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
64 65
65 return host,path,user,pswd,parm 66 return host, path, user, pswd, parm
66 doparse = staticmethod(doparse) 67 doparse = staticmethod(doparse)
67 68
68 def getcset(d, depot,host,user,pswd,parm): 69 def getcset(d, depot, host, user, pswd, parm):
69 p4opt = "" 70 p4opt = ""
70 if "cset" in parm: 71 if "cset" in parm:
71 return parm["cset"]; 72 return parm["cset"];
@@ -95,9 +96,9 @@ class Perforce(Fetch):
95 return cset.split(' ')[1] 96 return cset.split(' ')[1]
96 getcset = staticmethod(getcset) 97 getcset = staticmethod(getcset)
97 98
98 def localpath(self, url, ud, d): 99 def localpath(self, url, ud, d):
99 100
100 (host,path,user,pswd,parm) = Perforce.doparse(url,d) 101 (host, path, user, pswd, parm) = Perforce.doparse(url, d)
101 102
102 # If a label is specified, we use that as our filename 103 # If a label is specified, we use that as our filename
103 104
@@ -115,7 +116,7 @@ class Perforce(Fetch):
115 116
116 cset = Perforce.getcset(d, path, host, user, pswd, parm) 117 cset = Perforce.getcset(d, path, host, user, pswd, parm)
117 118
118 ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d) 119 ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
119 120
120 return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) 121 return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
121 122
@@ -124,7 +125,7 @@ class Perforce(Fetch):
124 Fetch urls 125 Fetch urls
125 """ 126 """
126 127
127 (host,depot,user,pswd,parm) = Perforce.doparse(loc, d) 128 (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
128 129
129 if depot.find('/...') != -1: 130 if depot.find('/...') != -1:
130 path = depot[:depot.find('/...')] 131 path = depot[:depot.find('/...')]
@@ -160,14 +161,14 @@ class Perforce(Fetch):
160 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") 161 tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
161 tmpfile = tmppipe.readline().strip() 162 tmpfile = tmppipe.readline().strip()
162 if not tmpfile: 163 if not tmpfile:
163 bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") 164 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
164 raise FetchError(module) 165 raise FetchError(module)
165 166
166 if "label" in parm: 167 if "label" in parm:
167 depot = "%s@%s" % (depot,parm["label"]) 168 depot = "%s@%s" % (depot, parm["label"])
168 else: 169 else:
169 cset = Perforce.getcset(d, depot, host, user, pswd, parm) 170 cset = Perforce.getcset(d, depot, host, user, pswd, parm)
170 depot = "%s@%s" % (depot,cset) 171 depot = "%s@%s" % (depot, cset)
171 172
172 os.chdir(tmpfile) 173 os.chdir(tmpfile)
173 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) 174 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
@@ -175,12 +176,12 @@ class Perforce(Fetch):
175 p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) 176 p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
176 177
177 if not p4file: 178 if not p4file:
178 bb.error("Fetch: unable to get the P4 files from %s" % (depot)) 179 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to get the P4 files from %s" % (depot))
179 raise FetchError(module) 180 raise FetchError(module)
180 181
181 count = 0 182 count = 0
182 183
183 for file in p4file: 184 for file in p4file:
184 list = file.split() 185 list = file.split()
185 186
186 if list[2] == "delete": 187 if list[2] == "delete":
@@ -189,11 +190,11 @@ class Perforce(Fetch):
189 dest = list[0][len(path)+1:] 190 dest = list[0][len(path)+1:]
190 where = dest.find("#") 191 where = dest.find("#")
191 192
192 os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module,dest[:where],list[0])) 193 os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
193 count = count + 1 194 count = count + 1
194 195
195 if count == 0: 196 if count == 0:
196 bb.error("Fetch: No files gathered from the P4 fetch") 197 bb.msg.error(bb.msg.domain.Fetcher, "Fetch: No files gathered from the P4 fetch")
197 raise FetchError(module) 198 raise FetchError(module)
198 199
199 myret = os.system("tar -czf %s %s" % (ud.localpath, module)) 200 myret = os.system("tar -czf %s %s" % (ud.localpath, module))
@@ -205,5 +206,3 @@ class Perforce(Fetch):
205 raise FetchError(module) 206 raise FetchError(module)
206 # cleanup 207 # cleanup
207 os.system('rm -rf %s' % tmpfile) 208 os.system('rm -rf %s' % tmpfile)
208
209
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py
index 34c32fe0bb..883310b019 100644
--- a/bitbake/lib/bb/fetch/repo.py
+++ b/bitbake/lib/bb/fetch/repo.py
@@ -23,11 +23,10 @@ BitBake "Fetch" repo (git) implementation
23# with this program; if not, write to the Free Software Foundation, Inc., 23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 25
26import os, re 26import os
27import bb 27import bb
28from bb import data 28from bb import data
29from bb.fetch import Fetch 29from bb.fetch import Fetch
30from bb.fetch import FetchError
31from bb.fetch import runfetchcmd 30from bb.fetch import runfetchcmd
32 31
33class Repo(Fetch): 32class Repo(Fetch):
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
index 68e6fdb1df..86c76f4e44 100644
--- a/bitbake/lib/bb/fetch/ssh.py
+++ b/bitbake/lib/bb/fetch/ssh.py
@@ -114,5 +114,5 @@ class SSH(Fetch):
114 114
115 (exitstatus, output) = commands.getstatusoutput(cmd) 115 (exitstatus, output) = commands.getstatusoutput(cmd)
116 if exitstatus != 0: 116 if exitstatus != 0:
117 print output 117 print(output)
118 raise FetchError('Unable to fetch %s' % url) 118 raise FetchError('Unable to fetch %s' % url)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index ba9f6ab109..375e8df055 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -78,7 +78,7 @@ class Svn(Fetch):
78 ud.revision = rev 78 ud.revision = rev
79 ud.date = "" 79 ud.date = ""
80 else: 80 else:
81 ud.revision = "" 81 ud.revision = ""
82 82
83 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) 83 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
84 84
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index ae1c6ad136..dcc58c75e8 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -30,6 +30,8 @@ import bb
30from bb import data 30from bb import data
31from bb.fetch import Fetch 31from bb.fetch import Fetch
32from bb.fetch import FetchError 32from bb.fetch import FetchError
33from bb.fetch import encodeurl, decodeurl
34from bb.fetch import runfetchcmd
33 35
34class Wget(Fetch): 36class Wget(Fetch):
35 """Class to fetch urls via 'wget'""" 37 """Class to fetch urls via 'wget'"""
@@ -37,11 +39,11 @@ class Wget(Fetch):
37 """ 39 """
38 Check to see if a given url can be fetched with wget. 40 Check to see if a given url can be fetched with wget.
39 """ 41 """
40 return ud.type in ['http','https','ftp'] 42 return ud.type in ['http', 'https', 'ftp']
41 43
42 def localpath(self, url, ud, d): 44 def localpath(self, url, ud, d):
43 45
44 url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}]) 46 url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
45 ud.basename = os.path.basename(ud.path) 47 ud.basename = os.path.basename(ud.path)
46 ud.localfile = data.expand(os.path.basename(url), d) 48 ud.localfile = data.expand(os.path.basename(url), d)
47 49
@@ -60,37 +62,16 @@ class Wget(Fetch):
60 fetchcmd = data.getVar("FETCHCOMMAND", d, 1) 62 fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
61 63
62 uri = uri.split(";")[0] 64 uri = uri.split(";")[0]
63 uri_decoded = list(bb.decodeurl(uri)) 65 uri_decoded = list(decodeurl(uri))
64 uri_type = uri_decoded[0] 66 uri_type = uri_decoded[0]
65 uri_host = uri_decoded[1] 67 uri_host = uri_decoded[1]
66 68
67 bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
68 fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) 69 fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
69 fetchcmd = fetchcmd.replace("${FILE}", ud.basename) 70 fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
70 httpproxy = None 71
71 ftpproxy = None 72 bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
72 if uri_type == 'http':
73 httpproxy = data.getVar("HTTP_PROXY", d, True)
74 httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
75 for p in httpproxy_ignore:
76 if uri_host.endswith(p):
77 httpproxy = None
78 break
79 if uri_type == 'ftp':
80 ftpproxy = data.getVar("FTP_PROXY", d, True)
81 ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
82 for p in ftpproxy_ignore:
83 if uri_host.endswith(p):
84 ftpproxy = None
85 break
86 if httpproxy:
87 fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
88 if ftpproxy:
89 fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
90 bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) 73 bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
91 ret = os.system(fetchcmd) 74 runfetchcmd(fetchcmd, d)
92 if ret != 0:
93 return False
94 75
95 # Sanity check since wget can pretend it succeed when it didn't 76 # Sanity check since wget can pretend it succeed when it didn't
96 # Also, this used to happen if sourceforge sent us to the mirror page 77 # Also, this used to happen if sourceforge sent us to the mirror page
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
index f43c4a0580..1485b1357d 100644
--- a/bitbake/lib/bb/methodpool.py
+++ b/bitbake/lib/bb/methodpool.py
@@ -27,7 +27,7 @@
27 a method pool to do this task. 27 a method pool to do this task.
28 28
29 This pool will be used to compile and execute the functions. It 29 This pool will be used to compile and execute the functions. It
30 will be smart enough to 30 will be smart enough to
31""" 31"""
32 32
33from bb.utils import better_compile, better_exec 33from bb.utils import better_compile, better_exec
@@ -43,8 +43,8 @@ def insert_method(modulename, code, fn):
43 Add code of a module should be added. The methods 43 Add code of a module should be added. The methods
44 will be simply added, no checking will be done 44 will be simply added, no checking will be done
45 """ 45 """
46 comp = better_compile(code, "<bb>", fn ) 46 comp = better_compile(code, modulename, fn )
47 better_exec(comp, __builtins__, code, fn) 47 better_exec(comp, None, code, fn)
48 48
49 # now some instrumentation 49 # now some instrumentation
50 code = comp.co_names 50 code = comp.co_names
@@ -59,7 +59,7 @@ def insert_method(modulename, code, fn):
59def check_insert_method(modulename, code, fn): 59def check_insert_method(modulename, code, fn):
60 """ 60 """
61 Add the code if it wasnt added before. The module 61 Add the code if it wasnt added before. The module
62 name will be used for that 62 name will be used for that
63 63
64 Variables: 64 Variables:
65 @modulename a short name e.g. base.bbclass 65 @modulename a short name e.g. base.bbclass
@@ -81,4 +81,4 @@ def get_parsed_dict():
81 """ 81 """
82 shortcut 82 shortcut
83 """ 83 """
84 return _parsed_methods 84 return _parsed_methods
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 3fcf7091be..cea5efb5a4 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -22,26 +22,32 @@ Message handling infrastructure for bitbake
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import sys, bb 25import sys
26from bb import event 26import collections
27 27import bb
28debug_level = {} 28import bb.event
29 29
30debug_level = collections.defaultdict(lambda: 0)
30verbose = False 31verbose = False
31 32
32domain = bb.utils.Enum( 33def _NamedTuple(name, fields):
33 'Build', 34 Tuple = collections.namedtuple(name, " ".join(fields))
34 'Cache', 35 return Tuple(*range(len(fields)))
35 'Collection', 36
36 'Data', 37domain = _NamedTuple("Domain", (
37 'Depends', 38 "Default",
38 'Fetcher', 39 "Build",
39 'Parsing', 40 "Cache",
40 'PersistData', 41 "Collection",
41 'Provider', 42 "Data",
42 'RunQueue', 43 "Depends",
43 'TaskData', 44 "Fetcher",
44 'Util') 45 "Parsing",
46 "PersistData",
47 "Provider",
48 "RunQueue",
49 "TaskData",
50 "Util"))
45 51
46 52
47class MsgBase(bb.event.Event): 53class MsgBase(bb.event.Event):
@@ -49,7 +55,7 @@ class MsgBase(bb.event.Event):
49 55
50 def __init__(self, msg): 56 def __init__(self, msg):
51 self._message = msg 57 self._message = msg
52 event.Event.__init__(self) 58 bb.event.Event.__init__(self)
53 59
54class MsgDebug(MsgBase): 60class MsgDebug(MsgBase):
55 """Debug Message""" 61 """Debug Message"""
@@ -74,52 +80,62 @@ class MsgPlain(MsgBase):
74# 80#
75 81
76def set_debug_level(level): 82def set_debug_level(level):
77 bb.msg.debug_level = {} 83 for d in domain:
78 for domain in bb.msg.domain: 84 debug_level[d] = level
79 bb.msg.debug_level[domain] = level 85 debug_level[domain.Default] = level
80 bb.msg.debug_level['default'] = level 86
87def get_debug_level(msgdomain = domain.Default):
88 return debug_level[msgdomain]
81 89
82def set_verbose(level): 90def set_verbose(level):
83 bb.msg.verbose = level 91 verbose = level
84 92
85def set_debug_domains(domains): 93def set_debug_domains(strdomains):
86 for domain in domains: 94 for domainstr in strdomains:
87 found = False 95 for d in domain:
88 for ddomain in bb.msg.domain: 96 if domain._fields[d] == domainstr:
89 if domain == str(ddomain): 97 debug_level[d] += 1
90 bb.msg.debug_level[ddomain] = bb.msg.debug_level[ddomain] + 1 98 break
91 found = True 99 else:
92 if not found: 100 warn(None, "Logging domain %s is not valid, ignoring" % domainstr)
93 bb.msg.warn(None, "Logging domain %s is not valid, ignoring" % domain)
94 101
95# 102#
96# Message handling functions 103# Message handling functions
97# 104#
98 105
99def debug(level, domain, msg, fn = None): 106def debug(level, msgdomain, msg, fn = None):
100 if not domain: 107 if not msgdomain:
101 domain = 'default' 108 msgdomain = domain.Default
102 if debug_level[domain] >= level: 109
110 if debug_level[msgdomain] >= level:
103 bb.event.fire(MsgDebug(msg), None) 111 bb.event.fire(MsgDebug(msg), None)
112 if not bb.event._ui_handlers:
113 print('DEBUG: ' + msg)
114
115def note(level, msgdomain, msg, fn = None):
116 if not msgdomain:
117 msgdomain = domain.Default
104 118
105def note(level, domain, msg, fn = None): 119 if level == 1 or verbose or debug_level[msgdomain] >= 1:
106 if not domain:
107 domain = 'default'
108 if level == 1 or verbose or debug_level[domain] >= 1:
109 bb.event.fire(MsgNote(msg), None) 120 bb.event.fire(MsgNote(msg), None)
121 if not bb.event._ui_handlers:
122 print('NOTE: ' + msg)
110 123
111def warn(domain, msg, fn = None): 124def warn(msgdomain, msg, fn = None):
112 bb.event.fire(MsgWarn(msg), None) 125 bb.event.fire(MsgWarn(msg), None)
126 if not bb.event._ui_handlers:
127 print('WARNING: ' + msg)
113 128
114def error(domain, msg, fn = None): 129def error(msgdomain, msg, fn = None):
115 bb.event.fire(MsgError(msg), None) 130 bb.event.fire(MsgError(msg), None)
116 print 'ERROR: ' + msg 131 print 'ERROR: ' + msg
117 132
118def fatal(domain, msg, fn = None): 133def fatal(msgdomain, msg, fn = None):
119 bb.event.fire(MsgFatal(msg), None) 134 bb.event.fire(MsgFatal(msg), None)
120 print 'FATAL: ' + msg 135 print('FATAL: ' + msg)
121 sys.exit(1) 136 sys.exit(1)
122 137
123def plain(msg, fn = None): 138def plain(msg, fn = None):
124 bb.event.fire(MsgPlain(msg), None) 139 bb.event.fire(MsgPlain(msg), None)
125 140 if not bb.event._ui_handlers:
141 print(msg)
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index 2a7897cdf2..95f372b00b 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -24,11 +24,10 @@ File parsers for the BitBake build tools.
24# 24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig 25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26 26
27__all__ = [ 'ParseError', 'SkipPackage', 'cached_mtime', 'mark_dependency',
28 'supports', 'handle', 'init' ]
29handlers = [] 27handlers = []
30 28
31import bb, os 29import bb, os
30import bb.utils
32 31
33class ParseError(Exception): 32class ParseError(Exception):
34 """Exception raised when parsing fails""" 33 """Exception raised when parsing fails"""
@@ -38,12 +37,12 @@ class SkipPackage(Exception):
38 37
39__mtime_cache = {} 38__mtime_cache = {}
40def cached_mtime(f): 39def cached_mtime(f):
41 if not __mtime_cache.has_key(f): 40 if f not in __mtime_cache:
42 __mtime_cache[f] = os.stat(f)[8] 41 __mtime_cache[f] = os.stat(f)[8]
43 return __mtime_cache[f] 42 return __mtime_cache[f]
44 43
45def cached_mtime_noerror(f): 44def cached_mtime_noerror(f):
46 if not __mtime_cache.has_key(f): 45 if f not in __mtime_cache:
47 try: 46 try:
48 __mtime_cache[f] = os.stat(f)[8] 47 __mtime_cache[f] = os.stat(f)[8]
49 except OSError: 48 except OSError:
@@ -57,8 +56,8 @@ def update_mtime(f):
57def mark_dependency(d, f): 56def mark_dependency(d, f):
58 if f.startswith('./'): 57 if f.startswith('./'):
59 f = "%s/%s" % (os.getcwd(), f[2:]) 58 f = "%s/%s" % (os.getcwd(), f[2:])
60 deps = bb.data.getVar('__depends', d) or [] 59 deps = bb.data.getVar('__depends', d) or set()
61 deps.append( (f, cached_mtime(f)) ) 60 deps.update([(f, cached_mtime(f))])
62 bb.data.setVar('__depends', deps, d) 61 bb.data.setVar('__depends', deps, d)
63 62
64def supports(fn, data): 63def supports(fn, data):
@@ -82,9 +81,11 @@ def init(fn, data):
82 81
83def resolve_file(fn, d): 82def resolve_file(fn, d):
84 if not os.path.isabs(fn): 83 if not os.path.isabs(fn):
85 fn = bb.which(bb.data.getVar("BBPATH", d, 1), fn) 84 bbpath = bb.data.getVar("BBPATH", d, True)
86 if not fn: 85 newfn = bb.which(bbpath, fn)
87 raise IOError("file %s not found" % fn) 86 if not newfn:
87 raise IOError("file %s not found in %s" % (fn, bbpath))
88 fn = newfn
88 89
89 bb.msg.debug(2, bb.msg.domain.Parsing, "LOAD %s" % fn) 90 bb.msg.debug(2, bb.msg.domain.Parsing, "LOAD %s" % fn)
90 return fn 91 return fn
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 59aa44bee0..dae2e11154 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -21,8 +21,11 @@
21# with this program; if not, write to the Free Software Foundation, Inc., 21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24from __future__ import absolute_import
25from future_builtins import filter
24import bb, re, string 26import bb, re, string
25from itertools import chain 27from bb import methodpool
28import itertools
26 29
27__word__ = re.compile(r"\S+") 30__word__ = re.compile(r"\S+")
28__parsed_methods__ = bb.methodpool.get_parsed_dict() 31__parsed_methods__ = bb.methodpool.get_parsed_dict()
@@ -30,7 +33,8 @@ _bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
30 33
31class StatementGroup(list): 34class StatementGroup(list):
32 def eval(self, data): 35 def eval(self, data):
33 map(lambda x: x.eval(data), self) 36 for statement in self:
37 statement.eval(data)
34 38
35class AstNode(object): 39class AstNode(object):
36 pass 40 pass
@@ -103,7 +107,6 @@ class DataNode(AstNode):
103 val = groupd["value"] 107 val = groupd["value"]
104 108
105 if 'flag' in groupd and groupd['flag'] != None: 109 if 'flag' in groupd and groupd['flag'] != None:
106 bb.msg.debug(3, bb.msg.domain.Parsing, "setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val))
107 bb.data.setVarFlag(key, groupd['flag'], val, data) 110 bb.data.setVarFlag(key, groupd['flag'], val, data)
108 elif groupd["lazyques"]: 111 elif groupd["lazyques"]:
109 assigned = bb.data.getVar("__lazy_assigned", data) or [] 112 assigned = bb.data.getVar("__lazy_assigned", data) or []
@@ -143,7 +146,7 @@ class PythonMethodNode(AstNode):
143 # Note we will add root to parsedmethods after having parse 146 # Note we will add root to parsedmethods after having parse
144 # 'this' file. This means we will not parse methods from 147 # 'this' file. This means we will not parse methods from
145 # bb classes twice 148 # bb classes twice
146 if not self.root in __parsed_methods__: 149 if not bb.methodpool.parsed_module(self.root):
147 text = '\n'.join(self.body) 150 text = '\n'.join(self.body)
148 bb.methodpool.insert_method(self.root, text, self.fn) 151 bb.methodpool.insert_method(self.root, text, self.fn)
149 152
@@ -254,7 +257,7 @@ class InheritNode(AstNode):
254 257
255 def eval(self, data): 258 def eval(self, data):
256 bb.parse.BBHandler.inherit(self.n, data) 259 bb.parse.BBHandler.inherit(self.n, data)
257 260
258def handleInclude(statements, m, fn, lineno, force): 261def handleInclude(statements, m, fn, lineno, force):
259 statements.append(IncludeNode(m.group(1), fn, lineno, force)) 262 statements.append(IncludeNode(m.group(1), fn, lineno, force))
260 263
@@ -293,7 +296,7 @@ def handleInherit(statements, m):
293 n = __word__.findall(files) 296 n = __word__.findall(files)
294 statements.append(InheritNode(m.group(1))) 297 statements.append(InheritNode(m.group(1)))
295 298
296def finalise(fn, d): 299def finalize(fn, d):
297 for lazykey in bb.data.getVar("__lazy_assigned", d) or (): 300 for lazykey in bb.data.getVar("__lazy_assigned", d) or ():
298 if bb.data.getVar(lazykey, d) is None: 301 if bb.data.getVar(lazykey, d) is None:
299 val = bb.data.getVarFlag(lazykey, "defaultval", d) 302 val = bb.data.getVarFlag(lazykey, "defaultval", d)
@@ -301,35 +304,16 @@ def finalise(fn, d):
301 304
302 bb.data.expandKeys(d) 305 bb.data.expandKeys(d)
303 bb.data.update_data(d) 306 bb.data.update_data(d)
304 anonqueue = bb.data.getVar("__anonqueue", d, 1) or [] 307 code = []
305 body = [x['content'] for x in anonqueue] 308 for funcname in bb.data.getVar("__BBANONFUNCS", d) or []:
306 flag = { 'python' : 1, 'func' : 1 } 309 code.append("%s(d)" % funcname)
307 bb.data.setVar("__anonfunc", "\n".join(body), d) 310 bb.utils.simple_exec("\n".join(code), {"d": d})
308 bb.data.setVarFlags("__anonfunc", flag, d)
309 from bb import build
310 try:
311 t = bb.data.getVar('T', d)
312 bb.data.setVar('T', '${TMPDIR}/anonfunc/', d)
313 anonfuncs = bb.data.getVar('__BBANONFUNCS', d) or []
314 code = ""
315 for f in anonfuncs:
316 code = code + " %s(d)\n" % f
317 bb.data.setVar("__anonfunc", code, d)
318 build.exec_func("__anonfunc", d)
319 bb.data.delVar('T', d)
320 if t:
321 bb.data.setVar('T', t, d)
322 except Exception, e:
323 bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
324 raise
325 bb.data.delVar("__anonqueue", d)
326 bb.data.delVar("__anonfunc", d)
327 bb.data.update_data(d) 311 bb.data.update_data(d)
328 312
329 all_handlers = {} 313 all_handlers = {}
330 for var in bb.data.getVar('__BBHANDLERS', d) or []: 314 for var in bb.data.getVar('__BBHANDLERS', d) or []:
331 # try to add the handler 315 # try to add the handler
332 handler = bb.data.getVar(var,d) 316 handler = bb.data.getVar(var, d)
333 bb.event.register(var, handler) 317 bb.event.register(var, handler)
334 318
335 tasklist = bb.data.getVar('__BBTASKS', d) or [] 319 tasklist = bb.data.getVar('__BBTASKS', d) or []
@@ -360,7 +344,7 @@ def _expand_versions(versions):
360 versions = iter(versions) 344 versions = iter(versions)
361 while True: 345 while True:
362 try: 346 try:
363 version = versions.next() 347 version = next(versions)
364 except StopIteration: 348 except StopIteration:
365 break 349 break
366 350
@@ -370,14 +354,14 @@ def _expand_versions(versions):
370 else: 354 else:
371 newversions = expand_one(version, int(range_ver.group("from")), 355 newversions = expand_one(version, int(range_ver.group("from")),
372 int(range_ver.group("to"))) 356 int(range_ver.group("to")))
373 versions = chain(newversions, versions) 357 versions = itertools.chain(newversions, versions)
374 358
375def multi_finalize(fn, d): 359def multi_finalize(fn, d):
376 safe_d = d 360 safe_d = d
377 361
378 d = bb.data.createCopy(safe_d) 362 d = bb.data.createCopy(safe_d)
379 try: 363 try:
380 finalise(fn, d) 364 finalize(fn, d)
381 except bb.parse.SkipPackage: 365 except bb.parse.SkipPackage:
382 bb.data.setVar("__SKIPPED", True, d) 366 bb.data.setVar("__SKIPPED", True, d)
383 datastores = {"": safe_d} 367 datastores = {"": safe_d}
@@ -420,7 +404,7 @@ def multi_finalize(fn, d):
420 d = bb.data.createCopy(safe_d) 404 d = bb.data.createCopy(safe_d)
421 verfunc(pv, d, safe_d) 405 verfunc(pv, d, safe_d)
422 try: 406 try:
423 finalise(fn, d) 407 finalize(fn, d)
424 except bb.parse.SkipPackage: 408 except bb.parse.SkipPackage:
425 bb.data.setVar("__SKIPPED", True, d) 409 bb.data.setVar("__SKIPPED", True, d)
426 410
@@ -436,15 +420,15 @@ def multi_finalize(fn, d):
436 safe_d.setVar("BBCLASSEXTEND", extended) 420 safe_d.setVar("BBCLASSEXTEND", extended)
437 _create_variants(datastores, extended.split(), extendfunc) 421 _create_variants(datastores, extended.split(), extendfunc)
438 422
439 for variant, variant_d in datastores.items(): 423 for variant, variant_d in datastores.iteritems():
440 if variant: 424 if variant:
441 try: 425 try:
442 finalise(fn, variant_d) 426 finalize(fn, variant_d)
443 except bb.parse.SkipPackage: 427 except bb.parse.SkipPackage:
444 bb.data.setVar("__SKIPPED", True, variant_d) 428 bb.data.setVar("__SKIPPED", True, variant_d)
445 429
446 if len(datastores) > 1: 430 if len(datastores) > 1:
447 variants = filter(None, datastores.keys()) 431 variants = filter(None, datastores.iterkeys())
448 safe_d.setVar("__VARIANTS", " ".join(variants)) 432 safe_d.setVar("__VARIANTS", " ".join(variants))
449 433
450 datastores[""] = d 434 datastores[""] = d
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 262c883c95..bb56174881 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -11,7 +11,7 @@
11 11
12# Copyright (C) 2003, 2004 Chris Larson 12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell 13# Copyright (C) 2003, 2004 Phil Blundell
14# 14#
15# This program is free software; you can redistribute it and/or modify 15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as 16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation. 17# published by the Free Software Foundation.
@@ -25,15 +25,17 @@
25# with this program; if not, write to the Free Software Foundation, Inc., 25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27 27
28import re, bb, os, sys, time, string 28from __future__ import absolute_import
29import re, bb, os
29import bb.fetch, bb.build, bb.utils 30import bb.fetch, bb.build, bb.utils
30from bb import data, fetch 31from bb import data
31 32
32from ConfHandler import include, init 33from . import ConfHandler
33from bb.parse import ParseError, resolve_file, ast 34from .. import resolve_file, ast
35from .ConfHandler import include, init
34 36
35# For compatibility 37# For compatibility
36from bb.parse import vars_from_file 38bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
37 39
38__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" ) 40__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
39__inherit_regexp__ = re.compile( r"inherit\s+(.+)" ) 41__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
@@ -68,8 +70,8 @@ def inherit(files, d):
68 __inherit_cache = data.getVar('__inherit_cache', d) or [] 70 __inherit_cache = data.getVar('__inherit_cache', d) or []
69 fn = "" 71 fn = ""
70 lineno = 0 72 lineno = 0
71 files = data.expand(files, d)
72 for file in files: 73 for file in files:
74 file = data.expand(file, d)
73 if file[0] != "/" and file[-8:] != ".bbclass": 75 if file[0] != "/" and file[-8:] != ".bbclass":
74 file = os.path.join('classes', '%s.bbclass' % file) 76 file = os.path.join('classes', '%s.bbclass' % file)
75 77
@@ -80,17 +82,17 @@ def inherit(files, d):
80 include(fn, file, d, "inherit") 82 include(fn, file, d, "inherit")
81 __inherit_cache = data.getVar('__inherit_cache', d) or [] 83 __inherit_cache = data.getVar('__inherit_cache', d) or []
82 84
83def get_statements(filename, absolsute_filename, base_name): 85def get_statements(filename, absolute_filename, base_name):
84 global cached_statements 86 global cached_statements
85 87
86 try: 88 try:
87 return cached_statements[absolsute_filename] 89 return cached_statements[absolute_filename]
88 except KeyError: 90 except KeyError:
89 file = open(absolsute_filename, 'r') 91 file = open(absolute_filename, 'r')
90 statements = ast.StatementGroup() 92 statements = ast.StatementGroup()
91 93
92 lineno = 0 94 lineno = 0
93 while 1: 95 while True:
94 lineno = lineno + 1 96 lineno = lineno + 1
95 s = file.readline() 97 s = file.readline()
96 if not s: break 98 if not s: break
@@ -101,7 +103,7 @@ def get_statements(filename, absolsute_filename, base_name):
101 feeder(IN_PYTHON_EOF, "", filename, base_name, statements) 103 feeder(IN_PYTHON_EOF, "", filename, base_name, statements)
102 104
103 if filename.endswith(".bbclass") or filename.endswith(".inc"): 105 if filename.endswith(".bbclass") or filename.endswith(".inc"):
104 cached_statements[absolsute_filename] = statements 106 cached_statements[absolute_filename] = statements
105 return statements 107 return statements
106 108
107def handle(fn, d, include): 109def handle(fn, d, include):
@@ -118,7 +120,7 @@ def handle(fn, d, include):
118 bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)") 120 bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")
119 121
120 (root, ext) = os.path.splitext(os.path.basename(fn)) 122 (root, ext) = os.path.splitext(os.path.basename(fn))
121 base_name = "%s%s" % (root,ext) 123 base_name = "%s%s" % (root, ext)
122 init(d) 124 init(d)
123 125
124 if ext == ".bbclass": 126 if ext == ".bbclass":
@@ -164,7 +166,7 @@ def handle(fn, d, include):
164 return d 166 return d
165 167
166def feeder(lineno, s, fn, root, statements): 168def feeder(lineno, s, fn, root, statements):
167 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, classes, bb, __residue__ 169 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
168 if __infunc__: 170 if __infunc__:
169 if s == '}': 171 if s == '}':
170 __body__.append('') 172 __body__.append('')
@@ -231,10 +233,9 @@ def feeder(lineno, s, fn, root, statements):
231 ast.handleInherit(statements, m) 233 ast.handleInherit(statements, m)
232 return 234 return
233 235
234 from bb.parse import ConfHandler
235 return ConfHandler.feeder(lineno, s, fn, statements) 236 return ConfHandler.feeder(lineno, s, fn, statements)
236 237
237# Add us to the handlers list 238# Add us to the handlers list
238from bb.parse import handlers 239from .. import handlers
239handlers.append({'supports': supports, 'handle': handle, 'init': init}) 240handlers.append({'supports': supports, 'handle': handle, 'init': init})
240del handlers 241del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index f4f85de245..9128a2ef8f 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -10,7 +10,7 @@
10 10
11# Copyright (C) 2003, 2004 Chris Larson 11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell 12# Copyright (C) 2003, 2004 Phil Blundell
13# 13#
14# This program is free software; you can redistribute it and/or modify 14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as 15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation. 16# published by the Free Software Foundation.
@@ -24,7 +24,8 @@
24# with this program; if not, write to the Free Software Foundation, Inc., 24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26 26
27import re, bb.data, os, sys 27import re, bb.data, os
28import bb.utils
28from bb.parse import ParseError, resolve_file, ast 29from bb.parse import ParseError, resolve_file, ast
29 30
30#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") 31#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
@@ -36,10 +37,7 @@ __export_regexp__ = re.compile( r"export\s+(.+)" )
36def init(data): 37def init(data):
37 topdir = bb.data.getVar('TOPDIR', data) 38 topdir = bb.data.getVar('TOPDIR', data)
38 if not topdir: 39 if not topdir:
39 topdir = os.getcwd() 40 bb.data.setVar('TOPDIR', os.getcwd(), data)
40 bb.data.setVar('TOPDIR', topdir, data)
41 if not bb.data.getVar('BBPATH', data):
42 bb.fatal("The BBPATH environment variable must be set")
43 41
44 42
45def supports(fn, d): 43def supports(fn, d):
@@ -60,7 +58,7 @@ def include(oldfn, fn, data, error_out):
60 if not os.path.isabs(fn): 58 if not os.path.isabs(fn):
61 dname = os.path.dirname(oldfn) 59 dname = os.path.dirname(oldfn)
62 bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1)) 60 bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
63 abs_fn = bb.which(bbpath, fn) 61 abs_fn = bb.utils.which(bbpath, fn)
64 if abs_fn: 62 if abs_fn:
65 fn = abs_fn 63 fn = abs_fn
66 64
@@ -88,7 +86,7 @@ def handle(fn, data, include):
88 86
89 statements = ast.StatementGroup() 87 statements = ast.StatementGroup()
90 lineno = 0 88 lineno = 0
91 while 1: 89 while True:
92 lineno = lineno + 1 90 lineno = lineno + 1
93 s = f.readline() 91 s = f.readline()
94 if not s: break 92 if not s: break
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py
index 9e0e00adda..3e658d0de9 100644
--- a/bitbake/lib/bb/parse/parse_py/__init__.py
+++ b/bitbake/lib/bb/parse/parse_py/__init__.py
@@ -25,9 +25,9 @@ File parsers for the BitBake build tools.
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26# 26#
27# Based on functions from the base bb module, Copyright 2003 Holger Schurig 27# Based on functions from the base bb module, Copyright 2003 Holger Schurig
28__version__ = '1.0'
29 28
30__all__ = [ 'ConfHandler', 'BBHandler'] 29from __future__ import absolute_import
30from . import ConfHandler
31from . import BBHandler
31 32
32import ConfHandler 33__version__ = '1.0'
33import BBHandler
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index bc4045fe85..df0409cd8a 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -16,6 +16,7 @@
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 17
18import bb, os 18import bb, os
19import bb.utils
19 20
20try: 21try:
21 import sqlite3 22 import sqlite3
@@ -33,58 +34,63 @@ class PersistData:
33 """ 34 """
34 BitBake Persistent Data Store 35 BitBake Persistent Data Store
35 36
36 Used to store data in a central location such that other threads/tasks can 37 Used to store data in a central location such that other threads/tasks can
37 access them at some future date. 38 access them at some future date.
38 39
39 The "domain" is used as a key to isolate each data pool and in this 40 The "domain" is used as a key to isolate each data pool and in this
40 implementation corresponds to an SQL table. The SQL table consists of a 41 implementation corresponds to an SQL table. The SQL table consists of a
41 simple key and value pair. 42 simple key and value pair.
42 43
43 Why sqlite? It handles all the locking issues for us. 44 Why sqlite? It handles all the locking issues for us.
44 """ 45 """
45 def __init__(self, d): 46 def __init__(self, d, persistent_database_connection):
47 if "connection" in persistent_database_connection:
48 self.cursor = persistent_database_connection["connection"].cursor()
49 return
46 self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True) 50 self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
47 if self.cachedir in [None, '']: 51 if self.cachedir in [None, '']:
48 bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.") 52 bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
49 try: 53 try:
50 os.stat(self.cachedir) 54 os.stat(self.cachedir)
51 except OSError: 55 except OSError:
52 bb.mkdirhier(self.cachedir) 56 bb.utils.mkdirhier(self.cachedir)
53 57
54 self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3") 58 self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
55 bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile) 59 bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
56 60
57 self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None) 61 connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
62 persistent_database_connection["connection"] = connection
63 self.cursor = persistent_database_connection["connection"].cursor()
58 64
59 def addDomain(self, domain): 65 def addDomain(self, domain):
60 """ 66 """
61 Should be called before any domain is used 67 Should be called before any domain is used
62 Creates it if it doesn't exist. 68 Creates it if it doesn't exist.
63 """ 69 """
64 self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain) 70 self.cursor.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)
65 71
66 def delDomain(self, domain): 72 def delDomain(self, domain):
67 """ 73 """
68 Removes a domain and all the data it contains 74 Removes a domain and all the data it contains
69 """ 75 """
70 self.connection.execute("DROP TABLE IF EXISTS %s;" % domain) 76 self.cursor.execute("DROP TABLE IF EXISTS %s;" % domain)
71 77
72 def getKeyValues(self, domain): 78 def getKeyValues(self, domain):
73 """ 79 """
74 Return a list of key + value pairs for a domain 80 Return a list of key + value pairs for a domain
75 """ 81 """
76 ret = {} 82 ret = {}
77 data = self.connection.execute("SELECT key, value from %s;" % domain) 83 data = self.cursor.execute("SELECT key, value from %s;" % domain)
78 for row in data: 84 for row in data:
79 ret[str(row[0])] = str(row[1]) 85 ret[str(row[0])] = str(row[1])
80 86
81 return ret 87 return ret
82 88
83 def getValue(self, domain, key): 89 def getValue(self, domain, key):
84 """ 90 """
85 Return the value of a key for a domain 91 Return the value of a key for a domain
86 """ 92 """
87 data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key]) 93 data = self.cursor.execute("SELECT * from %s where key=?;" % domain, [key])
88 for row in data: 94 for row in data:
89 return row[1] 95 return row[1]
90 96
@@ -92,7 +98,7 @@ class PersistData:
92 """ 98 """
93 Sets the value of a key for a domain 99 Sets the value of a key for a domain
94 """ 100 """
95 data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key]) 101 data = self.cursor.execute("SELECT * from %s where key=?;" % domain, [key])
96 rows = 0 102 rows = 0
97 for row in data: 103 for row in data:
98 rows = rows + 1 104 rows = rows + 1
@@ -108,14 +114,11 @@ class PersistData:
108 self._execute("DELETE from %s where key=?;" % domain, [key]) 114 self._execute("DELETE from %s where key=?;" % domain, [key])
109 115
110 def _execute(self, *query): 116 def _execute(self, *query):
111 while True: 117 while True:
112 try: 118 try:
113 self.connection.execute(*query) 119 self.cursor.execute(*query)
114 return 120 return
115 except sqlite3.OperationalError, e: 121 except sqlite3.OperationalError as e:
116 if 'database is locked' in str(e): 122 if 'database is locked' in str(e):
117 continue 123 continue
118 raise 124 raise
119
120
121
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 058996ba57..58326f0398 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -62,7 +62,7 @@ def sortPriorities(pn, dataCache, pkg_pn = None):
62def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): 62def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
63 """ 63 """
64 Check if the version pe,pv,pr is the preferred one. 64 Check if the version pe,pv,pr is the preferred one.
65 If there is preferred version defined and ends with '%', then pv has to start with that version after removing the '%' 65 If there is preferred version defined and ends with '%', then pv has to start with that version after removing the '%'
66 """ 66 """
67 if (pr == preferred_r or preferred_r == None): 67 if (pr == preferred_r or preferred_r == None):
68 if (pe == preferred_e or preferred_e == None): 68 if (pe == preferred_e or preferred_e == None):
@@ -103,7 +103,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
103 103
104 for file_set in pkg_pn: 104 for file_set in pkg_pn:
105 for f in file_set: 105 for f in file_set:
106 pe,pv,pr = dataCache.pkg_pepvpr[f] 106 pe, pv, pr = dataCache.pkg_pepvpr[f]
107 if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): 107 if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
108 preferred_file = f 108 preferred_file = f
109 preferred_ver = (pe, pv, pr) 109 preferred_ver = (pe, pv, pr)
@@ -136,7 +136,7 @@ def findLatestProvider(pn, cfgData, dataCache, file_set):
136 latest_p = 0 136 latest_p = 0
137 latest_f = None 137 latest_f = None
138 for file_name in file_set: 138 for file_name in file_set:
139 pe,pv,pr = dataCache.pkg_pepvpr[file_name] 139 pe, pv, pr = dataCache.pkg_pepvpr[file_name]
140 dp = dataCache.pkg_dp[file_name] 140 dp = dataCache.pkg_dp[file_name]
141 141
142 if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p): 142 if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
@@ -169,14 +169,14 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
169 169
170def _filterProviders(providers, item, cfgData, dataCache): 170def _filterProviders(providers, item, cfgData, dataCache):
171 """ 171 """
172 Take a list of providers and filter/reorder according to the 172 Take a list of providers and filter/reorder according to the
173 environment variables and previous build results 173 environment variables and previous build results
174 """ 174 """
175 eligible = [] 175 eligible = []
176 preferred_versions = {} 176 preferred_versions = {}
177 sortpkg_pn = {} 177 sortpkg_pn = {}
178 178
179 # The order of providers depends on the order of the files on the disk 179 # The order of providers depends on the order of the files on the disk
180 # up to here. Sort pkg_pn to make dependency issues reproducible rather 180 # up to here. Sort pkg_pn to make dependency issues reproducible rather
181 # than effectively random. 181 # than effectively random.
182 providers.sort() 182 providers.sort()
@@ -226,7 +226,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
226 226
227def filterProviders(providers, item, cfgData, dataCache): 227def filterProviders(providers, item, cfgData, dataCache):
228 """ 228 """
229 Take a list of providers and filter/reorder according to the 229 Take a list of providers and filter/reorder according to the
230 environment variables and previous build results 230 environment variables and previous build results
231 Takes a "normal" target item 231 Takes a "normal" target item
232 """ 232 """
@@ -254,7 +254,7 @@ def filterProviders(providers, item, cfgData, dataCache):
254 254
255def filterProvidersRunTime(providers, item, cfgData, dataCache): 255def filterProvidersRunTime(providers, item, cfgData, dataCache):
256 """ 256 """
257 Take a list of providers and filter/reorder according to the 257 Take a list of providers and filter/reorder according to the
258 environment variables and previous build results 258 environment variables and previous build results
259 Takes a "runtime" target item 259 Takes a "runtime" target item
260 """ 260 """
@@ -297,7 +297,7 @@ def getRuntimeProviders(dataCache, rdepend):
297 rproviders = [] 297 rproviders = []
298 298
299 if rdepend in dataCache.rproviders: 299 if rdepend in dataCache.rproviders:
300 rproviders += dataCache.rproviders[rdepend] 300 rproviders += dataCache.rproviders[rdepend]
301 301
302 if rdepend in dataCache.packages: 302 if rdepend in dataCache.packages:
303 rproviders += dataCache.packages[rdepend] 303 rproviders += dataCache.packages[rdepend]
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index c5f4380c86..a4aea6c004 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -22,15 +22,15 @@ Handles preparation and execution of a queue of tasks
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25from bb import msg, data, event, mkdirhier, utils
26import bb, os, sys 25import bb, os, sys
26from bb import msg, data, event
27import signal 27import signal
28import stat 28import stat
29import fcntl 29import fcntl
30 30
31class TaskFailure(Exception): 31class TaskFailure(Exception):
32 """Exception raised when a task in a runqueue fails""" 32 """Exception raised when a task in a runqueue fails"""
33 def __init__(self, x): 33 def __init__(self, x):
34 self.args = x 34 self.args = x
35 35
36 36
@@ -60,7 +60,7 @@ class RunQueueStats:
60 def taskActive(self): 60 def taskActive(self):
61 self.active = self.active + 1 61 self.active = self.active + 1
62 62
63# These values indicate the next step due to be run in the 63# These values indicate the next step due to be run in the
64# runQueue state machine 64# runQueue state machine
65runQueuePrepare = 2 65runQueuePrepare = 2
66runQueueRunInit = 3 66runQueueRunInit = 3
@@ -76,7 +76,7 @@ class RunQueueScheduler:
76 """ 76 """
77 def __init__(self, runqueue): 77 def __init__(self, runqueue):
78 """ 78 """
79 The default scheduler just returns the first buildable task (the 79 The default scheduler just returns the first buildable task (the
80 priority map is sorted by task number) 80 priority map is sorted by task number)
81 """ 81 """
82 self.rq = runqueue 82 self.rq = runqueue
@@ -109,8 +109,7 @@ class RunQueueSchedulerSpeed(RunQueueScheduler):
109 109
110 self.rq = runqueue 110 self.rq = runqueue
111 111
112 sortweight = deepcopy(self.rq.runq_weight) 112 sortweight = sorted(deepcopy(self.rq.runq_weight))
113 sortweight.sort()
114 copyweight = deepcopy(self.rq.runq_weight) 113 copyweight = deepcopy(self.rq.runq_weight)
115 self.prio_map = [] 114 self.prio_map = []
116 115
@@ -123,10 +122,10 @@ class RunQueueSchedulerSpeed(RunQueueScheduler):
123 122
124class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed): 123class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
125 """ 124 """
126 A scheduler optimised to complete .bb files as quickly as possible. The 125 A scheduler optimised to complete .bb files as quickly as possible. The
127 priority map is sorted by task weight, but then reordered so once a given 126 priority map is sorted by task weight, but then reordered so once a given
128 .bb file starts to build, it's completed as quickly as possible. This works 127 .bb file starts to build, it's completed as quickly as possible. This works
129 well where disk space is at a premium and classes like OE's rm_work are in 128 well where disk space is at a premium and classes like OE's rm_work are in
130 force. 129 force.
131 """ 130 """
132 def __init__(self, runqueue): 131 def __init__(self, runqueue):
@@ -135,7 +134,7 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
135 134
136 #FIXME - whilst this groups all fnids together it does not reorder the 135 #FIXME - whilst this groups all fnids together it does not reorder the
137 #fnid groups optimally. 136 #fnid groups optimally.
138 137
139 basemap = deepcopy(self.prio_map) 138 basemap = deepcopy(self.prio_map)
140 self.prio_map = [] 139 self.prio_map = []
141 while (len(basemap) > 0): 140 while (len(basemap) > 0):
@@ -231,7 +230,7 @@ class RunQueue:
231 if chain1[index] != chain2[index]: 230 if chain1[index] != chain2[index]:
232 return False 231 return False
233 return True 232 return True
234 233
235 def chain_array_contains(chain, chain_array): 234 def chain_array_contains(chain, chain_array):
236 """ 235 """
237 Return True if chain_array contains chain 236 Return True if chain_array contains chain
@@ -286,7 +285,7 @@ class RunQueue:
286 285
287 def calculate_task_weights(self, endpoints): 286 def calculate_task_weights(self, endpoints):
288 """ 287 """
289 Calculate a number representing the "weight" of each task. Heavier weighted tasks 288 Calculate a number representing the "weight" of each task. Heavier weighted tasks
290 have more dependencies and hence should be executed sooner for maximum speed. 289 have more dependencies and hence should be executed sooner for maximum speed.
291 290
292 This function also sanity checks the task list finding tasks that it's not 291 This function also sanity checks the task list finding tasks that it's not
@@ -307,7 +306,7 @@ class RunQueue:
307 weight[listid] = 1 306 weight[listid] = 1
308 task_done[listid] = True 307 task_done[listid] = True
309 308
310 while 1: 309 while True:
311 next_points = [] 310 next_points = []
312 for listid in endpoints: 311 for listid in endpoints:
313 for revdep in self.runq_depends[listid]: 312 for revdep in self.runq_depends[listid]:
@@ -318,7 +317,7 @@ class RunQueue:
318 task_done[revdep] = True 317 task_done[revdep] = True
319 endpoints = next_points 318 endpoints = next_points
320 if len(next_points) == 0: 319 if len(next_points) == 0:
321 break 320 break
322 321
323 # Circular dependency sanity check 322 # Circular dependency sanity check
324 problem_tasks = [] 323 problem_tasks = []
@@ -345,7 +344,7 @@ class RunQueue:
345 344
346 def prepare_runqueue(self): 345 def prepare_runqueue(self):
347 """ 346 """
348 Turn a set of taskData into a RunQueue and compute data needed 347 Turn a set of taskData into a RunQueue and compute data needed
349 to optimise the execution order. 348 to optimise the execution order.
350 """ 349 """
351 350
@@ -365,12 +364,12 @@ class RunQueue:
365 # Step A - Work out a list of tasks to run 364 # Step A - Work out a list of tasks to run
366 # 365 #
367 # Taskdata gives us a list of possible providers for every build and run 366 # Taskdata gives us a list of possible providers for every build and run
368 # target ordered by priority. It also gives information on each of those 367 # target ordered by priority. It also gives information on each of those
369 # providers. 368 # providers.
370 # 369 #
371 # To create the actual list of tasks to execute we fix the list of 370 # To create the actual list of tasks to execute we fix the list of
372 # providers and then resolve the dependencies into task IDs. This 371 # providers and then resolve the dependencies into task IDs. This
373 # process is repeated for each type of dependency (tdepends, deptask, 372 # process is repeated for each type of dependency (tdepends, deptask,
374 # rdeptask, recrdeptask, idepends). 373 # rdeptask, recrdeptask, idepends).
375 374
376 def add_build_dependencies(depids, tasknames, depends): 375 def add_build_dependencies(depids, tasknames, depends):
@@ -411,12 +410,12 @@ class RunQueue:
411 410
412 if fnid not in taskData.failed_fnids: 411 if fnid not in taskData.failed_fnids:
413 412
414 # Resolve task internal dependencies 413 # Resolve task internal dependencies
415 # 414 #
416 # e.g. addtask before X after Y 415 # e.g. addtask before X after Y
417 depends = taskData.tasks_tdepends[task] 416 depends = taskData.tasks_tdepends[task]
418 417
419 # Resolve 'deptask' dependencies 418 # Resolve 'deptask' dependencies
420 # 419 #
421 # e.g. do_sometask[deptask] = "do_someothertask" 420 # e.g. do_sometask[deptask] = "do_someothertask"
422 # (makes sure sometask runs after someothertask of all DEPENDS) 421 # (makes sure sometask runs after someothertask of all DEPENDS)
@@ -424,7 +423,7 @@ class RunQueue:
424 tasknames = task_deps['deptask'][taskData.tasks_name[task]].split() 423 tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
425 add_build_dependencies(taskData.depids[fnid], tasknames, depends) 424 add_build_dependencies(taskData.depids[fnid], tasknames, depends)
426 425
427 # Resolve 'rdeptask' dependencies 426 # Resolve 'rdeptask' dependencies
428 # 427 #
429 # e.g. do_sometask[rdeptask] = "do_someothertask" 428 # e.g. do_sometask[rdeptask] = "do_someothertask"
430 # (makes sure sometask runs after someothertask of all RDEPENDS) 429 # (makes sure sometask runs after someothertask of all RDEPENDS)
@@ -432,7 +431,7 @@ class RunQueue:
432 taskname = task_deps['rdeptask'][taskData.tasks_name[task]] 431 taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
433 add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends) 432 add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)
434 433
435 # Resolve inter-task dependencies 434 # Resolve inter-task dependencies
436 # 435 #
437 # e.g. do_sometask[depends] = "targetname:do_someothertask" 436 # e.g. do_sometask[depends] = "targetname:do_someothertask"
438 # (makes sure sometask runs after targetname's someothertask) 437 # (makes sure sometask runs after targetname's someothertask)
@@ -467,8 +466,8 @@ class RunQueue:
467 newdep = [] 466 newdep = []
468 bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)) 467 bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends))
469 for dep in depends: 468 for dep in depends:
470 if task != dep: 469 if task != dep:
471 newdep.append(dep) 470 newdep.append(dep)
472 depends = newdep 471 depends = newdep
473 472
474 self.runq_fnid.append(taskData.tasks_fnid[task]) 473 self.runq_fnid.append(taskData.tasks_fnid[task])
@@ -482,7 +481,7 @@ class RunQueue:
482 # 481 #
483 # Build a list of recursive cumulative dependencies for each fnid 482 # Build a list of recursive cumulative dependencies for each fnid
484 # We do this by fnid, since if A depends on some task in B 483 # We do this by fnid, since if A depends on some task in B
485 # we're interested in later tasks B's fnid might have but B itself 484 # we're interested in later tasks B's fnid might have but B itself
486 # doesn't depend on 485 # doesn't depend on
487 # 486 #
488 # Algorithm is O(tasks) + O(tasks)*O(fnids) 487 # Algorithm is O(tasks) + O(tasks)*O(fnids)
@@ -513,7 +512,7 @@ class RunQueue:
513 if len(runq_recrdepends[task]) > 0: 512 if len(runq_recrdepends[task]) > 0:
514 taskfnid = self.runq_fnid[task] 513 taskfnid = self.runq_fnid[task]
515 for dep in reccumdepends[taskfnid]: 514 for dep in reccumdepends[taskfnid]:
516 # Ignore self references 515 # Ignore self references
517 if dep == task: 516 if dep == task:
518 continue 517 continue
519 for taskname in runq_recrdepends[task]: 518 for taskname in runq_recrdepends[task]:
@@ -631,11 +630,11 @@ class RunQueue:
631 for dep in revdeps: 630 for dep in revdeps:
632 if dep in self.runq_depends[listid]: 631 if dep in self.runq_depends[listid]:
633 #self.dump_data(taskData) 632 #self.dump_data(taskData)
634 bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep] , taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid])) 633 bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
635 634
636 bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints)) 635 bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints))
637 636
638 # Calculate task weights 637 # Calculate task weights
639 # Check for higher length circular dependencies 638 # Check for higher length circular dependencies
640 self.runq_weight = self.calculate_task_weights(endpoints) 639 self.runq_weight = self.calculate_task_weights(endpoints)
641 640
@@ -657,7 +656,7 @@ class RunQueue:
657 for prov in self.dataCache.fn_provides[fn]: 656 for prov in self.dataCache.fn_provides[fn]:
658 if prov not in prov_list: 657 if prov not in prov_list:
659 prov_list[prov] = [fn] 658 prov_list[prov] = [fn]
660 elif fn not in prov_list[prov]: 659 elif fn not in prov_list[prov]:
661 prov_list[prov].append(fn) 660 prov_list[prov].append(fn)
662 error = False 661 error = False
663 for prov in prov_list: 662 for prov in prov_list:
@@ -703,7 +702,7 @@ class RunQueue:
703 buildable.append(task) 702 buildable.append(task)
704 703
705 def check_buildable(self, task, buildable): 704 def check_buildable(self, task, buildable):
706 for revdep in self.runq_revdeps[task]: 705 for revdep in self.runq_revdeps[task]:
707 alldeps = 1 706 alldeps = 1
708 for dep in self.runq_depends[revdep]: 707 for dep in self.runq_depends[revdep]:
709 if dep in unchecked: 708 if dep in unchecked:
@@ -774,7 +773,7 @@ class RunQueue:
774 #print "Not current: %s" % notcurrent 773 #print "Not current: %s" % notcurrent
775 774
776 if len(unchecked) > 0: 775 if len(unchecked) > 0:
777 bb.fatal("check_stamps fatal internal error") 776 bb.msg.fatal(bb.msg.domain.RunQueue, "check_stamps fatal internal error")
778 return current 777 return current
779 778
780 def check_stamp_task(self, task): 779 def check_stamp_task(self, task):
@@ -811,10 +810,10 @@ class RunQueue:
811 try: 810 try:
812 t2 = os.stat(stampfile2)[stat.ST_MTIME] 811 t2 = os.stat(stampfile2)[stat.ST_MTIME]
813 if t1 < t2: 812 if t1 < t2:
814 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile,stampfile2)) 813 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile, stampfile2))
815 iscurrent = False 814 iscurrent = False
816 except: 815 except:
817 bb.msg.debug(2, bb.msg.domain.RunQueue, "Exception reading %s for %s" % (stampfile2 ,stampfile)) 816 bb.msg.debug(2, bb.msg.domain.RunQueue, "Exception reading %s for %s" % (stampfile2, stampfile))
818 iscurrent = False 817 iscurrent = False
819 818
820 return iscurrent 819 return iscurrent
@@ -852,7 +851,7 @@ class RunQueue:
852 return False 851 return False
853 852
854 if self.state is runQueueChildProcess: 853 if self.state is runQueueChildProcess:
855 print "Child process" 854 print("Child process")
856 return False 855 return False
857 856
858 # Loop 857 # Loop
@@ -885,7 +884,7 @@ class RunQueue:
885 def task_complete(self, task): 884 def task_complete(self, task):
886 """ 885 """
887 Mark a task as completed 886 Mark a task as completed
888 Look at the reverse dependencies and mark any task with 887 Look at the reverse dependencies and mark any task with
889 completed dependencies as buildable 888 completed dependencies as buildable
890 """ 889 """
891 self.runq_complete[task] = 1 890 self.runq_complete[task] = 1
@@ -929,7 +928,7 @@ class RunQueue:
929 while True: 928 while True:
930 task = None 929 task = None
931 if self.stats.active < self.number_tasks: 930 if self.stats.active < self.number_tasks:
932 task = self.sched.next() 931 task = next(self.sched)
933 if task is not None: 932 if task is not None:
934 fn = self.taskData.fn_index[self.runq_fnid[task]] 933 fn = self.taskData.fn_index[self.runq_fnid[task]]
935 934
@@ -948,7 +947,7 @@ class RunQueue:
948 try: 947 try:
949 pipein, pipeout = os.pipe() 948 pipein, pipeout = os.pipe()
950 pid = os.fork() 949 pid = os.fork()
951 except OSError, e: 950 except OSError as e:
952 bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) 951 bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
953 if pid == 0: 952 if pid == 0:
954 os.close(pipein) 953 os.close(pipein)
@@ -982,10 +981,11 @@ class RunQueue:
982 try: 981 try:
983 self.cooker.tryBuild(fn, taskname[3:]) 982 self.cooker.tryBuild(fn, taskname[3:])
984 except bb.build.EventException: 983 except bb.build.EventException:
985 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
986 os._exit(1) 984 os._exit(1)
987 except: 985 except Exception:
988 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") 986 from traceback import format_exc
987 bb.msg.error(bb.msg.domain.Build, "Build of %s %s failed" % (fn, taskname))
988 bb.msg.error(bb.msg.domain.Build, format_exc())
989 os._exit(1) 989 os._exit(1)
990 os._exit(0) 990 os._exit(0)
991 991
@@ -1031,12 +1031,13 @@ class RunQueue:
1031 return 1031 return
1032 1032
1033 def finish_runqueue_now(self): 1033 def finish_runqueue_now(self):
1034 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.stats.active) 1034 if self.stats.active:
1035 for k, v in self.build_pids.iteritems(): 1035 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.stats.active)
1036 try: 1036 for k, v in self.build_pids.iteritems():
1037 os.kill(-k, signal.SIGINT) 1037 try:
1038 except: 1038 os.kill(-k, signal.SIGINT)
1039 pass 1039 except:
1040 pass
1040 for pipe in self.build_pipes: 1041 for pipe in self.build_pipes:
1041 self.build_pipes[pipe].read() 1042 self.build_pipes[pipe].read()
1042 1043
@@ -1085,30 +1086,30 @@ class RunQueue:
1085 """ 1086 """
1086 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:") 1087 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:")
1087 for task in range(len(self.runq_task)): 1088 for task in range(len(self.runq_task)):
1088 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 1089 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
1089 taskQueue.fn_index[self.runq_fnid[task]], 1090 taskQueue.fn_index[self.runq_fnid[task]],
1090 self.runq_task[task], 1091 self.runq_task[task],
1091 self.runq_weight[task], 1092 self.runq_weight[task],
1092 self.runq_depends[task], 1093 self.runq_depends[task],
1093 self.runq_revdeps[task])) 1094 self.runq_revdeps[task]))
1094 1095
1095 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:") 1096 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:")
1096 for task1 in range(len(self.runq_task)): 1097 for task1 in range(len(self.runq_task)):
1097 if task1 in self.prio_map: 1098 if task1 in self.prio_map:
1098 task = self.prio_map[task1] 1099 task = self.prio_map[task1]
1099 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 1100 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
1100 taskQueue.fn_index[self.runq_fnid[task]], 1101 taskQueue.fn_index[self.runq_fnid[task]],
1101 self.runq_task[task], 1102 self.runq_task[task],
1102 self.runq_weight[task], 1103 self.runq_weight[task],
1103 self.runq_depends[task], 1104 self.runq_depends[task],
1104 self.runq_revdeps[task])) 1105 self.runq_revdeps[task]))
1105 1106
1106 1107
1107class TaskFailure(Exception): 1108class TaskFailure(Exception):
1108 """ 1109 """
1109 Exception raised when a task in a runqueue fails 1110 Exception raised when a task in a runqueue fails
1110 """ 1111 """
1111 def __init__(self, x): 1112 def __init__(self, x):
1112 self.args = x 1113 self.args = x
1113 1114
1114 1115
@@ -1194,6 +1195,5 @@ class runQueuePipe():
1194 while self.read(): 1195 while self.read():
1195 continue 1196 continue
1196 if len(self.queue) > 0: 1197 if len(self.queue) > 0:
1197 print "Warning, worker left partial message" 1198 print("Warning, worker left partial message")
1198 os.close(self.fd) 1199 os.close(self.fd)
1199
diff --git a/bitbake/lib/bb/server/none.py b/bitbake/lib/bb/server/none.py
index ebda111582..e28aa8d7d7 100644
--- a/bitbake/lib/bb/server/none.py
+++ b/bitbake/lib/bb/server/none.py
@@ -115,7 +115,7 @@ class BitBakeServer():
115 115
116 def register_idle_function(self, function, data): 116 def register_idle_function(self, function, data):
117 """Register a function to be called while the server is idle""" 117 """Register a function to be called while the server is idle"""
118 assert callable(function) 118 assert hasattr(function, '__call__')
119 self._idlefuns[function] = data 119 self._idlefuns[function] = data
120 120
121 def idle_commands(self, delay): 121 def idle_commands(self, delay):
@@ -178,4 +178,3 @@ class BitBakeServerConnection():
178 self.connection.terminateServer() 178 self.connection.terminateServer()
179 except: 179 except:
180 pass 180 pass
181
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
index 3364918c77..cb2949fb9f 100644
--- a/bitbake/lib/bb/server/xmlrpc.py
+++ b/bitbake/lib/bb/server/xmlrpc.py
@@ -42,7 +42,7 @@ from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
42import inspect, select 42import inspect, select
43 43
44if sys.hexversion < 0x020600F0: 44if sys.hexversion < 0x020600F0:
45 print "Sorry, python 2.6 or later is required for bitbake's XMLRPC mode" 45 print("Sorry, python 2.6 or later is required for bitbake's XMLRPC mode")
46 sys.exit(1) 46 sys.exit(1)
47 47
48class BitBakeServerCommands(): 48class BitBakeServerCommands():
@@ -74,7 +74,7 @@ class BitBakeServerCommands():
74 Trigger the server to quit 74 Trigger the server to quit
75 """ 75 """
76 self.server.quit = True 76 self.server.quit = True
77 print "Server (cooker) exitting" 77 print("Server (cooker) exitting")
78 return 78 return
79 79
80 def ping(self): 80 def ping(self):
@@ -89,8 +89,8 @@ class BitBakeServer(SimpleXMLRPCServer):
89 89
90 def __init__(self, cooker, interface = ("localhost", 0)): 90 def __init__(self, cooker, interface = ("localhost", 0)):
91 """ 91 """
92 Constructor 92 Constructor
93 """ 93 """
94 SimpleXMLRPCServer.__init__(self, interface, 94 SimpleXMLRPCServer.__init__(self, interface,
95 requestHandler=SimpleXMLRPCRequestHandler, 95 requestHandler=SimpleXMLRPCRequestHandler,
96 logRequests=False, allow_none=True) 96 logRequests=False, allow_none=True)
@@ -112,7 +112,7 @@ class BitBakeServer(SimpleXMLRPCServer):
112 112
113 def register_idle_function(self, function, data): 113 def register_idle_function(self, function, data):
114 """Register a function to be called while the server is idle""" 114 """Register a function to be called while the server is idle"""
115 assert callable(function) 115 assert hasattr(function, '__call__')
116 self._idlefuns[function] = data 116 self._idlefuns[function] = data
117 117
118 def serve_forever(self): 118 def serve_forever(self):
@@ -146,7 +146,7 @@ class BitBakeServer(SimpleXMLRPCServer):
146 traceback.print_exc() 146 traceback.print_exc()
147 pass 147 pass
148 if nextsleep is None and len(self._idlefuns) > 0: 148 if nextsleep is None and len(self._idlefuns) > 0:
149 nextsleep = 0 149 nextsleep = 0
150 self.timeout = nextsleep 150 self.timeout = nextsleep
151 # Tell idle functions we're exiting 151 # Tell idle functions we're exiting
152 for function, data in self._idlefuns.items(): 152 for function, data in self._idlefuns.items():
@@ -175,7 +175,7 @@ class BitBakeServerConnection():
175 def terminate(self): 175 def terminate(self):
176 # Don't wait for server indefinitely 176 # Don't wait for server indefinitely
177 import socket 177 import socket
178 socket.setdefaulttimeout(2) 178 socket.setdefaulttimeout(2)
179 try: 179 try:
180 self.events.system_quit() 180 self.events.system_quit()
181 except: 181 except:
@@ -184,4 +184,3 @@ class BitBakeServerConnection():
184 self.connection.terminateServer() 184 self.connection.terminateServer()
185 except: 185 except:
186 pass 186 pass
187
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
index 7abea0f126..f9ca9d5bd3 100644
--- a/bitbake/lib/bb/shell.py
+++ b/bitbake/lib/bb/shell.py
@@ -52,12 +52,14 @@ PROBLEMS:
52# Import and setup global variables 52# Import and setup global variables
53########################################################################## 53##########################################################################
54 54
55from __future__ import print_function
56from functools import reduce
55try: 57try:
56 set 58 set
57except NameError: 59except NameError:
58 from sets import Set as set 60 from sets import Set as set
59import sys, os, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch 61import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
60from bb import data, parse, build, fatal, cache, taskdata, runqueue, providers as Providers 62from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
61 63
62__version__ = "0.5.3.1" 64__version__ = "0.5.3.1"
63__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de> 65__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
@@ -98,7 +100,7 @@ class BitBakeShellCommands:
98 100
99 def _checkParsed( self ): 101 def _checkParsed( self ):
100 if not parsed: 102 if not parsed:
101 print "SHELL: This command needs to parse bbfiles..." 103 print("SHELL: This command needs to parse bbfiles...")
102 self.parse( None ) 104 self.parse( None )
103 105
104 def _findProvider( self, item ): 106 def _findProvider( self, item ):
@@ -119,28 +121,28 @@ class BitBakeShellCommands:
119 """Register a new name for a command""" 121 """Register a new name for a command"""
120 new, old = params 122 new, old = params
121 if not old in cmds: 123 if not old in cmds:
122 print "ERROR: Command '%s' not known" % old 124 print("ERROR: Command '%s' not known" % old)
123 else: 125 else:
124 cmds[new] = cmds[old] 126 cmds[new] = cmds[old]
125 print "OK" 127 print("OK")
126 alias.usage = "<alias> <command>" 128 alias.usage = "<alias> <command>"
127 129
128 def buffer( self, params ): 130 def buffer( self, params ):
129 """Dump specified output buffer""" 131 """Dump specified output buffer"""
130 index = params[0] 132 index = params[0]
131 print self._shell.myout.buffer( int( index ) ) 133 print(self._shell.myout.buffer( int( index ) ))
132 buffer.usage = "<index>" 134 buffer.usage = "<index>"
133 135
134 def buffers( self, params ): 136 def buffers( self, params ):
135 """Show the available output buffers""" 137 """Show the available output buffers"""
136 commands = self._shell.myout.bufferedCommands() 138 commands = self._shell.myout.bufferedCommands()
137 if not commands: 139 if not commands:
138 print "SHELL: No buffered commands available yet. Start doing something." 140 print("SHELL: No buffered commands available yet. Start doing something.")
139 else: 141 else:
140 print "="*35, "Available Output Buffers", "="*27 142 print("="*35, "Available Output Buffers", "="*27)
141 for index, cmd in enumerate( commands ): 143 for index, cmd in enumerate( commands ):
142 print "| %s %s" % ( str( index ).ljust( 3 ), cmd ) 144 print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
143 print "="*88 145 print("="*88)
144 146
145 def build( self, params, cmd = "build" ): 147 def build( self, params, cmd = "build" ):
146 """Build a providee""" 148 """Build a providee"""
@@ -149,7 +151,7 @@ class BitBakeShellCommands:
149 self._checkParsed() 151 self._checkParsed()
150 names = globfilter( cooker.status.pkg_pn, globexpr ) 152 names = globfilter( cooker.status.pkg_pn, globexpr )
151 if len( names ) == 0: names = [ globexpr ] 153 if len( names ) == 0: names = [ globexpr ]
152 print "SHELL: Building %s" % ' '.join( names ) 154 print("SHELL: Building %s" % ' '.join( names ))
153 155
154 td = taskdata.TaskData(cooker.configuration.abort) 156 td = taskdata.TaskData(cooker.configuration.abort)
155 localdata = data.createCopy(cooker.configuration.data) 157 localdata = data.createCopy(cooker.configuration.data)
@@ -168,22 +170,22 @@ class BitBakeShellCommands:
168 tasks.append([name, "do_%s" % cmd]) 170 tasks.append([name, "do_%s" % cmd])
169 171
170 td.add_unresolved(localdata, cooker.status) 172 td.add_unresolved(localdata, cooker.status)
171 173
172 rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks) 174 rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
173 rq.prepare_runqueue() 175 rq.prepare_runqueue()
174 rq.execute_runqueue() 176 rq.execute_runqueue()
175 177
176 except Providers.NoProvider: 178 except Providers.NoProvider:
177 print "ERROR: No Provider" 179 print("ERROR: No Provider")
178 last_exception = Providers.NoProvider 180 last_exception = Providers.NoProvider
179 181
180 except runqueue.TaskFailure, fnids: 182 except runqueue.TaskFailure as fnids:
181 for fnid in fnids: 183 for fnid in fnids:
182 print "ERROR: '%s' failed" % td.fn_index[fnid] 184 print("ERROR: '%s' failed" % td.fn_index[fnid])
183 last_exception = runqueue.TaskFailure 185 last_exception = runqueue.TaskFailure
184 186
185 except build.EventException, e: 187 except build.EventException as e:
186 print "ERROR: Couldn't build '%s'" % names 188 print("ERROR: Couldn't build '%s'" % names)
187 last_exception = e 189 last_exception = e
188 190
189 191
@@ -216,7 +218,7 @@ class BitBakeShellCommands:
216 if bbfile is not None: 218 if bbfile is not None:
217 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) ) 219 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
218 else: 220 else:
219 print "ERROR: Nothing provides '%s'" % name 221 print("ERROR: Nothing provides '%s'" % name)
220 edit.usage = "<providee>" 222 edit.usage = "<providee>"
221 223
222 def environment( self, params ): 224 def environment( self, params ):
@@ -239,14 +241,14 @@ class BitBakeShellCommands:
239 global last_exception 241 global last_exception
240 name = params[0] 242 name = params[0]
241 bf = completeFilePath( name ) 243 bf = completeFilePath( name )
242 print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) 244 print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
243 245
244 try: 246 try:
245 cooker.buildFile(bf, cmd) 247 cooker.buildFile(bf, cmd)
246 except parse.ParseError: 248 except parse.ParseError:
247 print "ERROR: Unable to open or parse '%s'" % bf 249 print("ERROR: Unable to open or parse '%s'" % bf)
248 except build.EventException, e: 250 except build.EventException as e:
249 print "ERROR: Couldn't build '%s'" % name 251 print("ERROR: Couldn't build '%s'" % name)
250 last_exception = e 252 last_exception = e
251 253
252 fileBuild.usage = "<bbfile>" 254 fileBuild.usage = "<bbfile>"
@@ -270,62 +272,62 @@ class BitBakeShellCommands:
270 def fileReparse( self, params ): 272 def fileReparse( self, params ):
271 """(re)Parse a bb file""" 273 """(re)Parse a bb file"""
272 bbfile = params[0] 274 bbfile = params[0]
273 print "SHELL: Parsing '%s'" % bbfile 275 print("SHELL: Parsing '%s'" % bbfile)
274 parse.update_mtime( bbfile ) 276 parse.update_mtime( bbfile )
275 cooker.bb_cache.cacheValidUpdate(bbfile) 277 cooker.bb_cache.cacheValidUpdate(bbfile)
276 fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data, cooker.status) 278 fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data, cooker.status)
277 cooker.bb_cache.sync() 279 cooker.bb_cache.sync()
278 if False: #fromCache: 280 if False: #fromCache:
279 print "SHELL: File has not been updated, not reparsing" 281 print("SHELL: File has not been updated, not reparsing")
280 else: 282 else:
281 print "SHELL: Parsed" 283 print("SHELL: Parsed")
282 fileReparse.usage = "<bbfile>" 284 fileReparse.usage = "<bbfile>"
283 285
284 def abort( self, params ): 286 def abort( self, params ):
285 """Toggle abort task execution flag (see bitbake -k)""" 287 """Toggle abort task execution flag (see bitbake -k)"""
286 cooker.configuration.abort = not cooker.configuration.abort 288 cooker.configuration.abort = not cooker.configuration.abort
287 print "SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ) 289 print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
288 290
289 def force( self, params ): 291 def force( self, params ):
290 """Toggle force task execution flag (see bitbake -f)""" 292 """Toggle force task execution flag (see bitbake -f)"""
291 cooker.configuration.force = not cooker.configuration.force 293 cooker.configuration.force = not cooker.configuration.force
292 print "SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ) 294 print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
293 295
294 def help( self, params ): 296 def help( self, params ):
295 """Show a comprehensive list of commands and their purpose""" 297 """Show a comprehensive list of commands and their purpose"""
296 print "="*30, "Available Commands", "="*30 298 print("="*30, "Available Commands", "="*30)
297 for cmd in sorted(cmds): 299 for cmd in sorted(cmds):
298 function,numparams,usage,helptext = cmds[cmd] 300 function, numparams, usage, helptext = cmds[cmd]
299 print "| %s | %s" % (usage.ljust(30), helptext) 301 print("| %s | %s" % (usage.ljust(30), helptext))
300 print "="*78 302 print("="*78)
301 303
302 def lastError( self, params ): 304 def lastError( self, params ):
303 """Show the reason or log that was produced by the last BitBake event exception""" 305 """Show the reason or log that was produced by the last BitBake event exception"""
304 if last_exception is None: 306 if last_exception is None:
305 print "SHELL: No Errors yet (Phew)..." 307 print("SHELL: No Errors yet (Phew)...")
306 else: 308 else:
307 reason, event = last_exception.args 309 reason, event = last_exception.args
308 print "SHELL: Reason for the last error: '%s'" % reason 310 print("SHELL: Reason for the last error: '%s'" % reason)
309 if ':' in reason: 311 if ':' in reason:
310 msg, filename = reason.split( ':' ) 312 msg, filename = reason.split( ':' )
311 filename = filename.strip() 313 filename = filename.strip()
312 print "SHELL: Dumping log file for last error:" 314 print("SHELL: Dumping log file for last error:")
313 try: 315 try:
314 print open( filename ).read() 316 print(open( filename ).read())
315 except IOError: 317 except IOError:
316 print "ERROR: Couldn't open '%s'" % filename 318 print("ERROR: Couldn't open '%s'" % filename)
317 319
318 def match( self, params ): 320 def match( self, params ):
319 """Dump all files or providers matching a glob expression""" 321 """Dump all files or providers matching a glob expression"""
320 what, globexpr = params 322 what, globexpr = params
321 if what == "files": 323 if what == "files":
322 self._checkParsed() 324 self._checkParsed()
323 for key in globfilter( cooker.status.pkg_fn, globexpr ): print key 325 for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
324 elif what == "providers": 326 elif what == "providers":
325 self._checkParsed() 327 self._checkParsed()
326 for key in globfilter( cooker.status.pkg_pn, globexpr ): print key 328 for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
327 else: 329 else:
328 print "Usage: match %s" % self.print_.usage 330 print("Usage: match %s" % self.print_.usage)
329 match.usage = "<files|providers> <glob>" 331 match.usage = "<files|providers> <glob>"
330 332
331 def new( self, params ): 333 def new( self, params ):
@@ -335,15 +337,15 @@ class BitBakeShellCommands:
335 fulldirname = "%s/%s" % ( packages, dirname ) 337 fulldirname = "%s/%s" % ( packages, dirname )
336 338
337 if not os.path.exists( fulldirname ): 339 if not os.path.exists( fulldirname ):
338 print "SHELL: Creating '%s'" % fulldirname 340 print("SHELL: Creating '%s'" % fulldirname)
339 os.mkdir( fulldirname ) 341 os.mkdir( fulldirname )
340 if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ): 342 if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
341 if os.path.exists( "%s/%s" % ( fulldirname, filename ) ): 343 if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
342 print "SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ) 344 print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
343 return False 345 return False
344 print "SHELL: Creating '%s/%s'" % ( fulldirname, filename ) 346 print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
345 newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" ) 347 newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
346 print >>newpackage,"""DESCRIPTION = "" 348 print("""DESCRIPTION = ""
347SECTION = "" 349SECTION = ""
348AUTHOR = "" 350AUTHOR = ""
349HOMEPAGE = "" 351HOMEPAGE = ""
@@ -370,7 +372,7 @@ SRC_URI = ""
370#do_install() { 372#do_install() {
371# 373#
372#} 374#}
373""" 375""", file=newpackage)
374 newpackage.close() 376 newpackage.close()
375 os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) ) 377 os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
376 new.usage = "<directory> <filename>" 378 new.usage = "<directory> <filename>"
@@ -390,14 +392,14 @@ SRC_URI = ""
390 def pasteLog( self, params ): 392 def pasteLog( self, params ):
391 """Send the last event exception error log (if there is one) to http://rafb.net/paste""" 393 """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
392 if last_exception is None: 394 if last_exception is None:
393 print "SHELL: No Errors yet (Phew)..." 395 print("SHELL: No Errors yet (Phew)...")
394 else: 396 else:
395 reason, event = last_exception.args 397 reason, event = last_exception.args
396 print "SHELL: Reason for the last error: '%s'" % reason 398 print("SHELL: Reason for the last error: '%s'" % reason)
397 if ':' in reason: 399 if ':' in reason:
398 msg, filename = reason.split( ':' ) 400 msg, filename = reason.split( ':' )
399 filename = filename.strip() 401 filename = filename.strip()
400 print "SHELL: Pasting log file to pastebin..." 402 print("SHELL: Pasting log file to pastebin...")
401 403
402 file = open( filename ).read() 404 file = open( filename ).read()
403 sendToPastebin( "contents of " + filename, file ) 405 sendToPastebin( "contents of " + filename, file )
@@ -419,23 +421,23 @@ SRC_URI = ""
419 cooker.buildDepgraph() 421 cooker.buildDepgraph()
420 global parsed 422 global parsed
421 parsed = True 423 parsed = True
422 print 424 print()
423 425
424 def reparse( self, params ): 426 def reparse( self, params ):
425 """(re)Parse a providee's bb file""" 427 """(re)Parse a providee's bb file"""
426 bbfile = self._findProvider( params[0] ) 428 bbfile = self._findProvider( params[0] )
427 if bbfile is not None: 429 if bbfile is not None:
428 print "SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ) 430 print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
429 self.fileReparse( [ bbfile ] ) 431 self.fileReparse( [ bbfile ] )
430 else: 432 else:
431 print "ERROR: Nothing provides '%s'" % params[0] 433 print("ERROR: Nothing provides '%s'" % params[0])
432 reparse.usage = "<providee>" 434 reparse.usage = "<providee>"
433 435
434 def getvar( self, params ): 436 def getvar( self, params ):
435 """Dump the contents of an outer BitBake environment variable""" 437 """Dump the contents of an outer BitBake environment variable"""
436 var = params[0] 438 var = params[0]
437 value = data.getVar( var, cooker.configuration.data, 1 ) 439 value = data.getVar( var, cooker.configuration.data, 1 )
438 print value 440 print(value)
439 getvar.usage = "<variable>" 441 getvar.usage = "<variable>"
440 442
441 def peek( self, params ): 443 def peek( self, params ):
@@ -445,9 +447,9 @@ SRC_URI = ""
445 if bbfile is not None: 447 if bbfile is not None:
446 the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data) 448 the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data)
447 value = the_data.getVar( var, 1 ) 449 value = the_data.getVar( var, 1 )
448 print value 450 print(value)
449 else: 451 else:
450 print "ERROR: Nothing provides '%s'" % name 452 print("ERROR: Nothing provides '%s'" % name)
451 peek.usage = "<providee> <variable>" 453 peek.usage = "<providee> <variable>"
452 454
453 def poke( self, params ): 455 def poke( self, params ):
@@ -455,7 +457,7 @@ SRC_URI = ""
455 name, var, value = params 457 name, var, value = params
456 bbfile = self._findProvider( name ) 458 bbfile = self._findProvider( name )
457 if bbfile is not None: 459 if bbfile is not None:
458 print "ERROR: Sorry, this functionality is currently broken" 460 print("ERROR: Sorry, this functionality is currently broken")
459 #d = cooker.pkgdata[bbfile] 461 #d = cooker.pkgdata[bbfile]
460 #data.setVar( var, value, d ) 462 #data.setVar( var, value, d )
461 463
@@ -463,7 +465,7 @@ SRC_URI = ""
463 #cooker.pkgdata.setDirty(bbfile, d) 465 #cooker.pkgdata.setDirty(bbfile, d)
464 #print "OK" 466 #print "OK"
465 else: 467 else:
466 print "ERROR: Nothing provides '%s'" % name 468 print("ERROR: Nothing provides '%s'" % name)
467 poke.usage = "<providee> <variable> <value>" 469 poke.usage = "<providee> <variable> <value>"
468 470
469 def print_( self, params ): 471 def print_( self, params ):
@@ -471,12 +473,12 @@ SRC_URI = ""
471 what = params[0] 473 what = params[0]
472 if what == "files": 474 if what == "files":
473 self._checkParsed() 475 self._checkParsed()
474 for key in cooker.status.pkg_fn: print key 476 for key in cooker.status.pkg_fn: print(key)
475 elif what == "providers": 477 elif what == "providers":
476 self._checkParsed() 478 self._checkParsed()
477 for key in cooker.status.providers: print key 479 for key in cooker.status.providers: print(key)
478 else: 480 else:
479 print "Usage: print %s" % self.print_.usage 481 print("Usage: print %s" % self.print_.usage)
480 print_.usage = "<files|providers>" 482 print_.usage = "<files|providers>"
481 483
482 def python( self, params ): 484 def python( self, params ):
@@ -496,7 +498,7 @@ SRC_URI = ""
496 """Set an outer BitBake environment variable""" 498 """Set an outer BitBake environment variable"""
497 var, value = params 499 var, value = params
498 data.setVar( var, value, cooker.configuration.data ) 500 data.setVar( var, value, cooker.configuration.data )
499 print "OK" 501 print("OK")
500 setVar.usage = "<variable> <value>" 502 setVar.usage = "<variable> <value>"
501 503
502 def rebuild( self, params ): 504 def rebuild( self, params ):
@@ -508,7 +510,7 @@ SRC_URI = ""
508 def shell( self, params ): 510 def shell( self, params ):
509 """Execute a shell command and dump the output""" 511 """Execute a shell command and dump the output"""
510 if params != "": 512 if params != "":
511 print commands.getoutput( " ".join( params ) ) 513 print(commands.getoutput( " ".join( params ) ))
512 shell.usage = "<...>" 514 shell.usage = "<...>"
513 515
514 def stage( self, params ): 516 def stage( self, params ):
@@ -518,17 +520,17 @@ SRC_URI = ""
518 520
519 def status( self, params ): 521 def status( self, params ):
520 """<just for testing>""" 522 """<just for testing>"""
521 print "-" * 78 523 print("-" * 78)
522 print "building list = '%s'" % cooker.building_list 524 print("building list = '%s'" % cooker.building_list)
523 print "build path = '%s'" % cooker.build_path 525 print("build path = '%s'" % cooker.build_path)
524 print "consider_msgs_cache = '%s'" % cooker.consider_msgs_cache 526 print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
525 print "build stats = '%s'" % cooker.stats 527 print("build stats = '%s'" % cooker.stats)
526 if last_exception is not None: print "last_exception = '%s'" % repr( last_exception.args ) 528 if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
527 print "memory output contents = '%s'" % self._shell.myout._buffer 529 print("memory output contents = '%s'" % self._shell.myout._buffer)
528 530
529 def test( self, params ): 531 def test( self, params ):
530 """<just for testing>""" 532 """<just for testing>"""
531 print "testCommand called with '%s'" % params 533 print("testCommand called with '%s'" % params)
532 534
533 def unpack( self, params ): 535 def unpack( self, params ):
534 """Execute 'unpack' on a providee""" 536 """Execute 'unpack' on a providee"""
@@ -553,12 +555,12 @@ SRC_URI = ""
553 try: 555 try:
554 providers = cooker.status.providers[item] 556 providers = cooker.status.providers[item]
555 except KeyError: 557 except KeyError:
556 print "SHELL: ERROR: Nothing provides", preferred 558 print("SHELL: ERROR: Nothing provides", preferred)
557 else: 559 else:
558 for provider in providers: 560 for provider in providers:
559 if provider == pf: provider = " (***) %s" % provider 561 if provider == pf: provider = " (***) %s" % provider
560 else: provider = " %s" % provider 562 else: provider = " %s" % provider
561 print provider 563 print(provider)
562 which.usage = "<providee>" 564 which.usage = "<providee>"
563 565
564########################################################################## 566##########################################################################
@@ -583,7 +585,7 @@ def sendToPastebin( desc, content ):
583 mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" ) 585 mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
584 mydata["text"] = content 586 mydata["text"] = content
585 params = urllib.urlencode( mydata ) 587 params = urllib.urlencode( mydata )
586 headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"} 588 headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
587 589
588 host = "rafb.net" 590 host = "rafb.net"
589 conn = httplib.HTTPConnection( "%s:80" % host ) 591 conn = httplib.HTTPConnection( "%s:80" % host )
@@ -594,9 +596,9 @@ def sendToPastebin( desc, content ):
594 596
595 if response.status == 302: 597 if response.status == 302:
596 location = response.getheader( "location" ) or "unknown" 598 location = response.getheader( "location" ) or "unknown"
597 print "SHELL: Pasted to http://%s%s" % ( host, location ) 599 print("SHELL: Pasted to http://%s%s" % ( host, location ))
598 else: 600 else:
599 print "ERROR: %s %s" % ( response.status, response.reason ) 601 print("ERROR: %s %s" % ( response.status, response.reason ))
600 602
601def completer( text, state ): 603def completer( text, state ):
602 """Return a possible readline completion""" 604 """Return a possible readline completion"""
@@ -643,7 +645,7 @@ def columnize( alist, width = 80 ):
643 return reduce(lambda line, word, width=width: '%s%s%s' % 645 return reduce(lambda line, word, width=width: '%s%s%s' %
644 (line, 646 (line,
645 ' \n'[(len(line[line.rfind('\n')+1:]) 647 ' \n'[(len(line[line.rfind('\n')+1:])
646 + len(word.split('\n',1)[0] 648 + len(word.split('\n', 1)[0]
647 ) >= width)], 649 ) >= width)],
648 word), 650 word),
649 alist 651 alist
@@ -718,7 +720,7 @@ class BitBakeShell:
718 except IOError: 720 except IOError:
719 pass # It doesn't exist yet. 721 pass # It doesn't exist yet.
720 722
721 print __credits__ 723 print(__credits__)
722 724
723 def cleanup( self ): 725 def cleanup( self ):
724 """Write readline history and clean up resources""" 726 """Write readline history and clean up resources"""
@@ -726,7 +728,7 @@ class BitBakeShell:
726 try: 728 try:
727 readline.write_history_file( self.historyfilename ) 729 readline.write_history_file( self.historyfilename )
728 except: 730 except:
729 print "SHELL: Unable to save command history" 731 print("SHELL: Unable to save command history")
730 732
731 def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ): 733 def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
732 """Register a command""" 734 """Register a command"""
@@ -740,11 +742,11 @@ class BitBakeShell:
740 try: 742 try:
741 function, numparams, usage, helptext = cmds[command] 743 function, numparams, usage, helptext = cmds[command]
742 except KeyError: 744 except KeyError:
743 print "SHELL: ERROR: '%s' command is not a valid command." % command 745 print("SHELL: ERROR: '%s' command is not a valid command." % command)
744 self.myout.removeLast() 746 self.myout.removeLast()
745 else: 747 else:
746 if (numparams != -1) and (not len( params ) == numparams): 748 if (numparams != -1) and (not len( params ) == numparams):
747 print "Usage: '%s'" % usage 749 print("Usage: '%s'" % usage)
748 return 750 return
749 751
750 result = function( self.commands, params ) 752 result = function( self.commands, params )
@@ -759,7 +761,7 @@ class BitBakeShell:
759 if not cmdline: 761 if not cmdline:
760 continue 762 continue
761 if "|" in cmdline: 763 if "|" in cmdline:
762 print "ERROR: '|' in startup file is not allowed. Ignoring line" 764 print("ERROR: '|' in startup file is not allowed. Ignoring line")
763 continue 765 continue
764 self.commandQ.put( cmdline.strip() ) 766 self.commandQ.put( cmdline.strip() )
765 767
@@ -801,10 +803,10 @@ class BitBakeShell:
801 sys.stdout.write( pipe.fromchild.read() ) 803 sys.stdout.write( pipe.fromchild.read() )
802 # 804 #
803 except EOFError: 805 except EOFError:
804 print 806 print()
805 return 807 return
806 except KeyboardInterrupt: 808 except KeyboardInterrupt:
807 print 809 print()
808 810
809########################################################################## 811##########################################################################
810# Start function - called from the BitBake command line utility 812# Start function - called from the BitBake command line utility
@@ -819,4 +821,4 @@ def start( aCooker ):
819 bbshell.cleanup() 821 bbshell.cleanup()
820 822
821if __name__ == "__main__": 823if __name__ == "__main__":
822 print "SHELL: Sorry, this program should only be called by BitBake." 824 print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 3e5e006f5f..e31f967850 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -34,7 +34,7 @@ def re_match_strings(target, strings):
34 34
35 for name in strings: 35 for name in strings:
36 if (name==target or 36 if (name==target or
37 re.search(name,target)!=None): 37 re.search(name, target)!=None):
38 return True 38 return True
39 return False 39 return False
40 40
@@ -84,7 +84,7 @@ class TaskData:
84 84
85 def getrun_id(self, name): 85 def getrun_id(self, name):
86 """ 86 """
87 Return an ID number for the run target name. 87 Return an ID number for the run target name.
88 If it doesn't exist, create one. 88 If it doesn't exist, create one.
89 """ 89 """
90 if not name in self.run_names_index: 90 if not name in self.run_names_index:
@@ -95,7 +95,7 @@ class TaskData:
95 95
96 def getfn_id(self, name): 96 def getfn_id(self, name):
97 """ 97 """
98 Return an ID number for the filename. 98 Return an ID number for the filename.
99 If it doesn't exist, create one. 99 If it doesn't exist, create one.
100 """ 100 """
101 if not name in self.fn_index: 101 if not name in self.fn_index:
@@ -271,7 +271,7 @@ class TaskData:
271 271
272 def get_unresolved_build_targets(self, dataCache): 272 def get_unresolved_build_targets(self, dataCache):
273 """ 273 """
274 Return a list of build targets who's providers 274 Return a list of build targets who's providers
275 are unknown. 275 are unknown.
276 """ 276 """
277 unresolved = [] 277 unresolved = []
@@ -286,7 +286,7 @@ class TaskData:
286 286
287 def get_unresolved_run_targets(self, dataCache): 287 def get_unresolved_run_targets(self, dataCache):
288 """ 288 """
289 Return a list of runtime targets who's providers 289 Return a list of runtime targets who's providers
290 are unknown. 290 are unknown.
291 """ 291 """
292 unresolved = [] 292 unresolved = []
@@ -304,7 +304,7 @@ class TaskData:
304 Return a list of providers of item 304 Return a list of providers of item
305 """ 305 """
306 targetid = self.getbuild_id(item) 306 targetid = self.getbuild_id(item)
307 307
308 return self.build_targets[targetid] 308 return self.build_targets[targetid]
309 309
310 def get_dependees(self, itemid): 310 def get_dependees(self, itemid):
@@ -354,20 +354,15 @@ class TaskData:
354 self.add_provider_internal(cfgData, dataCache, item) 354 self.add_provider_internal(cfgData, dataCache, item)
355 except bb.providers.NoProvider: 355 except bb.providers.NoProvider:
356 if self.abort: 356 if self.abort:
357 if self.get_rdependees_str(item):
358 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
359 else:
360 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
361 raise 357 raise
362 targetid = self.getbuild_id(item) 358 self.remove_buildtarget(self.getbuild_id(item))
363 self.remove_buildtarget(targetid)
364 359
365 self.mark_external_target(item) 360 self.mark_external_target(item)
366 361
367 def add_provider_internal(self, cfgData, dataCache, item): 362 def add_provider_internal(self, cfgData, dataCache, item):
368 """ 363 """
369 Add the providers of item to the task data 364 Add the providers of item to the task data
370 Mark entries were specifically added externally as against dependencies 365 Mark entries were specifically added externally as against dependencies
371 added internally during dependency resolution 366 added internally during dependency resolution
372 """ 367 """
373 368
@@ -375,11 +370,7 @@ class TaskData:
375 return 370 return
376 371
377 if not item in dataCache.providers: 372 if not item in dataCache.providers:
378 if self.get_rdependees_str(item): 373 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_rdependees_str(item)), cfgData)
379 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
380 else:
381 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
382 bb.event.fire(bb.event.NoProvider(item), cfgData)
383 raise bb.providers.NoProvider(item) 374 raise bb.providers.NoProvider(item)
384 375
385 if self.have_build_target(item): 376 if self.have_build_target(item):
@@ -391,8 +382,7 @@ class TaskData:
391 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids] 382 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
392 383
393 if not eligible: 384 if not eligible:
394 bb.msg.note(2, bb.msg.domain.Provider, "No buildable provider PROVIDES '%s' but '%s' DEPENDS on or otherwise requires it. Enable debugging and see earlier logs to find unbuildable providers." % (item, self.get_dependees_str(item))) 385 bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item)), cfgData)
395 bb.event.fire(bb.event.NoProvider(item), cfgData)
396 raise bb.providers.NoProvider(item) 386 raise bb.providers.NoProvider(item)
397 387
398 if len(eligible) > 1 and foundUnique == False: 388 if len(eligible) > 1 and foundUnique == False:
@@ -400,8 +390,6 @@ class TaskData:
400 providers_list = [] 390 providers_list = []
401 for fn in eligible: 391 for fn in eligible:
402 providers_list.append(dataCache.pkg_fn[fn]) 392 providers_list.append(dataCache.pkg_fn[fn])
403 bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list)))
404 bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item)
405 bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData) 393 bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
406 self.consider_msgs_cache.append(item) 394 self.consider_msgs_cache.append(item)
407 395
@@ -431,16 +419,14 @@ class TaskData:
431 all_p = bb.providers.getRuntimeProviders(dataCache, item) 419 all_p = bb.providers.getRuntimeProviders(dataCache, item)
432 420
433 if not all_p: 421 if not all_p:
434 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables" % (self.get_rdependees_str(item), item)) 422 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
435 bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
436 raise bb.providers.NoRProvider(item) 423 raise bb.providers.NoRProvider(item)
437 424
438 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache) 425 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
439 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids] 426 eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
440 427
441 if not eligible: 428 if not eligible:
442 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables of any buildable targets.\nEnable debugging and see earlier logs to find unbuildable targets." % (self.get_rdependees_str(item), item)) 429 bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
443 bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
444 raise bb.providers.NoRProvider(item) 430 raise bb.providers.NoRProvider(item)
445 431
446 if len(eligible) > 1 and numberPreferred == 0: 432 if len(eligible) > 1 and numberPreferred == 0:
@@ -448,9 +434,7 @@ class TaskData:
448 providers_list = [] 434 providers_list = []
449 for fn in eligible: 435 for fn in eligible:
450 providers_list.append(dataCache.pkg_fn[fn]) 436 providers_list.append(dataCache.pkg_fn[fn])
451 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list))) 437 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
452 bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item)
453 bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
454 self.consider_msgs_cache.append(item) 438 self.consider_msgs_cache.append(item)
455 439
456 if numberPreferred > 1: 440 if numberPreferred > 1:
@@ -458,9 +442,7 @@ class TaskData:
458 providers_list = [] 442 providers_list = []
459 for fn in eligible: 443 for fn in eligible:
460 providers_list.append(dataCache.pkg_fn[fn]) 444 providers_list.append(dataCache.pkg_fn[fn])
461 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (top %s entries preferred) (%s);" % (item, numberPreferred, ", ".join(providers_list))) 445 bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
462 bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item)
463 bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
464 self.consider_msgs_cache.append(item) 446 self.consider_msgs_cache.append(item)
465 447
466 # run through the list until we find one that we can build 448 # run through the list until we find one that we can build
@@ -515,8 +497,9 @@ class TaskData:
515 self.fail_fnid(self.tasks_fnid[taskid], missing_list) 497 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
516 498
517 if self.abort and targetid in self.external_targets: 499 if self.abort and targetid in self.external_targets:
518 bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list)) 500 target = self.build_names_index[targetid]
519 raise bb.providers.NoProvider 501 bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (target, missing_list))
502 raise bb.providers.NoProvider(target)
520 503
521 def remove_runtarget(self, targetid, missing_list = []): 504 def remove_runtarget(self, targetid, missing_list = []):
522 """ 505 """
@@ -539,7 +522,7 @@ class TaskData:
539 Resolve all unresolved build and runtime targets 522 Resolve all unresolved build and runtime targets
540 """ 523 """
541 bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies") 524 bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies")
542 while 1: 525 while True:
543 added = 0 526 added = 0
544 for target in self.get_unresolved_build_targets(dataCache): 527 for target in self.get_unresolved_build_targets(dataCache):
545 try: 528 try:
@@ -548,10 +531,6 @@ class TaskData:
548 except bb.providers.NoProvider: 531 except bb.providers.NoProvider:
549 targetid = self.getbuild_id(target) 532 targetid = self.getbuild_id(target)
550 if self.abort and targetid in self.external_targets: 533 if self.abort and targetid in self.external_targets:
551 if self.get_rdependees_str(target):
552 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (target, self.get_dependees_str(target)))
553 else:
554 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (target))
555 raise 534 raise
556 self.remove_buildtarget(targetid) 535 self.remove_buildtarget(targetid)
557 for target in self.get_unresolved_run_targets(dataCache): 536 for target in self.get_unresolved_run_targets(dataCache):
@@ -594,9 +573,9 @@ class TaskData:
594 bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:") 573 bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
595 for task in range(len(self.tasks_name)): 574 for task in range(len(self.tasks_name)):
596 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % ( 575 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
597 task, 576 task,
598 self.fn_index[self.tasks_fnid[task]], 577 self.fn_index[self.tasks_fnid[task]],
599 self.tasks_name[task], 578 self.tasks_name[task],
600 self.tasks_tdepends[task])) 579 self.tasks_tdepends[task]))
601 580
602 bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):") 581 bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):")
@@ -606,5 +585,3 @@ class TaskData:
606 bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):") 585 bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):")
607 for fnid in self.rdepids: 586 for fnid in self.rdepids:
608 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid])) 587 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
609
610
diff --git a/bitbake/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
index c6a377a8e6..a4805ed028 100644
--- a/bitbake/lib/bb/ui/__init__.py
+++ b/bitbake/lib/bb/ui/__init__.py
@@ -15,4 +15,3 @@
15# You should have received a copy of the GNU General Public License along 15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc., 16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
diff --git a/bitbake/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
index c6a377a8e6..a4805ed028 100644
--- a/bitbake/lib/bb/ui/crumbs/__init__.py
+++ b/bitbake/lib/bb/ui/crumbs/__init__.py
@@ -15,4 +15,3 @@
15# You should have received a copy of the GNU General Public License along 15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc., 16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
diff --git a/bitbake/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
index f89e8eefd4..e858d75e4c 100644
--- a/bitbake/lib/bb/ui/crumbs/buildmanager.py
+++ b/bitbake/lib/bb/ui/crumbs/buildmanager.py
@@ -28,7 +28,7 @@ import time
28class BuildConfiguration: 28class BuildConfiguration:
29 """ Represents a potential *or* historic *or* concrete build. It 29 """ Represents a potential *or* historic *or* concrete build. It
30 encompasses all the things that we need to tell bitbake to do to make it 30 encompasses all the things that we need to tell bitbake to do to make it
31 build what we want it to build. 31 build what we want it to build.
32 32
33 It also stored the metadata URL and the set of possible machines (and the 33 It also stored the metadata URL and the set of possible machines (and the
34 distros / images / uris for these. Apart from the metdata URL these are 34 distros / images / uris for these. Apart from the metdata URL these are
@@ -73,34 +73,33 @@ class BuildConfiguration:
73 return self.urls 73 return self.urls
74 74
75 # It might be a lot lot better if we stored these in like, bitbake conf 75 # It might be a lot lot better if we stored these in like, bitbake conf
76 # file format. 76 # file format.
77 @staticmethod 77 @staticmethod
78 def load_from_file (filename): 78 def load_from_file (filename):
79 f = open (filename, "r")
80 79
81 conf = BuildConfiguration() 80 conf = BuildConfiguration()
82 for line in f.readlines(): 81 with open(filename, "r") as f:
83 data = line.split (";")[1] 82 for line in f:
84 if (line.startswith ("metadata-url;")): 83 data = line.split (";")[1]
85 conf.metadata_url = data.strip() 84 if (line.startswith ("metadata-url;")):
86 continue 85 conf.metadata_url = data.strip()
87 if (line.startswith ("url;")): 86 continue
88 conf.urls += [data.strip()] 87 if (line.startswith ("url;")):
89 continue 88 conf.urls += [data.strip()]
90 if (line.startswith ("extra-url;")): 89 continue
91 conf.extra_urls += [data.strip()] 90 if (line.startswith ("extra-url;")):
92 continue 91 conf.extra_urls += [data.strip()]
93 if (line.startswith ("machine;")): 92 continue
94 conf.machine = data.strip() 93 if (line.startswith ("machine;")):
95 continue 94 conf.machine = data.strip()
96 if (line.startswith ("distribution;")): 95 continue
97 conf.distro = data.strip() 96 if (line.startswith ("distribution;")):
98 continue 97 conf.distro = data.strip()
99 if (line.startswith ("image;")): 98 continue
100 conf.image = data.strip() 99 if (line.startswith ("image;")):
101 continue 100 conf.image = data.strip()
101 continue
102 102
103 f.close ()
104 return conf 103 return conf
105 104
106 # Serialise to a file. This is part of the build process and we use this 105 # Serialise to a file. This is part of the build process and we use this
@@ -140,13 +139,13 @@ class BuildResult(gobject.GObject):
140 ".conf" in the directory for the build. 139 ".conf" in the directory for the build.
141 140
142 This is GObject so that it can be included in the TreeStore.""" 141 This is GObject so that it can be included in the TreeStore."""
143 142
144 (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \ 143 (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
145 (0, 1, 2) 144 (0, 1, 2)
146 145
147 def __init__ (self, parent, identifier): 146 def __init__ (self, parent, identifier):
148 gobject.GObject.__init__ (self) 147 gobject.GObject.__init__ (self)
149 self.date = None 148 self.date = None
150 149
151 self.files = [] 150 self.files = []
152 self.status = None 151 self.status = None
@@ -157,8 +156,8 @@ class BuildResult(gobject.GObject):
157 # format build-<year><month><day>-<ordinal> we can easily 156 # format build-<year><month><day>-<ordinal> we can easily
158 # pull it out. 157 # pull it out.
159 # TODO: Better to stat a file? 158 # TODO: Better to stat a file?
160 (_ , date, revision) = identifier.split ("-") 159 (_, date, revision) = identifier.split ("-")
161 print date 160 print(date)
162 161
163 year = int (date[0:4]) 162 year = int (date[0:4])
164 month = int (date[4:6]) 163 month = int (date[4:6])
@@ -181,7 +180,7 @@ class BuildResult(gobject.GObject):
181 self.add_file (file) 180 self.add_file (file)
182 181
183 def add_file (self, file): 182 def add_file (self, file):
184 # Just add the file for now. Don't care about the type. 183 # Just add the file for now. Don't care about the type.
185 self.files += [(file, None)] 184 self.files += [(file, None)]
186 185
187class BuildManagerModel (gtk.TreeStore): 186class BuildManagerModel (gtk.TreeStore):
@@ -194,7 +193,7 @@ class BuildManagerModel (gtk.TreeStore):
194 193
195 def __init__ (self): 194 def __init__ (self):
196 gtk.TreeStore.__init__ (self, 195 gtk.TreeStore.__init__ (self,
197 gobject.TYPE_STRING, 196 gobject.TYPE_STRING,
198 gobject.TYPE_STRING, 197 gobject.TYPE_STRING,
199 gobject.TYPE_STRING, 198 gobject.TYPE_STRING,
200 gobject.TYPE_STRING, 199 gobject.TYPE_STRING,
@@ -207,7 +206,7 @@ class BuildManager (gobject.GObject):
207 "results" directory but is also used for starting a new build.""" 206 "results" directory but is also used for starting a new build."""
208 207
209 __gsignals__ = { 208 __gsignals__ = {
210 'population-finished' : (gobject.SIGNAL_RUN_LAST, 209 'population-finished' : (gobject.SIGNAL_RUN_LAST,
211 gobject.TYPE_NONE, 210 gobject.TYPE_NONE,
212 ()), 211 ()),
213 'populate-error' : (gobject.SIGNAL_RUN_LAST, 212 'populate-error' : (gobject.SIGNAL_RUN_LAST,
@@ -220,13 +219,13 @@ class BuildManager (gobject.GObject):
220 date = long (time.mktime (result.date.timetuple())) 219 date = long (time.mktime (result.date.timetuple()))
221 220
222 # Add a top level entry for the build 221 # Add a top level entry for the build
223 222
224 self.model.set (iter, 223 self.model.set (iter,
225 BuildManagerModel.COL_IDENT, result.identifier, 224 BuildManagerModel.COL_IDENT, result.identifier,
226 BuildManagerModel.COL_DESC, result.conf.image, 225 BuildManagerModel.COL_DESC, result.conf.image,
227 BuildManagerModel.COL_MACHINE, result.conf.machine, 226 BuildManagerModel.COL_MACHINE, result.conf.machine,
228 BuildManagerModel.COL_DISTRO, result.conf.distro, 227 BuildManagerModel.COL_DISTRO, result.conf.distro,
229 BuildManagerModel.COL_BUILD_RESULT, result, 228 BuildManagerModel.COL_BUILD_RESULT, result,
230 BuildManagerModel.COL_DATE, date, 229 BuildManagerModel.COL_DATE, date,
231 BuildManagerModel.COL_STATE, result.state) 230 BuildManagerModel.COL_STATE, result.state)
232 231
@@ -257,7 +256,7 @@ class BuildManager (gobject.GObject):
257 256
258 while (iter): 257 while (iter):
259 (ident, state) = self.model.get(iter, 258 (ident, state) = self.model.get(iter,
260 BuildManagerModel.COL_IDENT, 259 BuildManagerModel.COL_IDENT,
261 BuildManagerModel.COL_STATE) 260 BuildManagerModel.COL_STATE)
262 261
263 if state == BuildResult.STATE_ONGOING: 262 if state == BuildResult.STATE_ONGOING:
@@ -385,8 +384,8 @@ class BuildManager (gobject.GObject):
385 build_directory]) 384 build_directory])
386 server.runCommand(["buildTargets", [conf.image], "rootfs"]) 385 server.runCommand(["buildTargets", [conf.image], "rootfs"])
387 386
388 except Exception, e: 387 except Exception as e:
389 print e 388 print(e)
390 389
391class BuildManagerTreeView (gtk.TreeView): 390class BuildManagerTreeView (gtk.TreeView):
392 """ The tree view for the build manager. This shows the historic builds 391 """ The tree view for the build manager. This shows the historic builds
@@ -422,29 +421,29 @@ class BuildManagerTreeView (gtk.TreeView):
422 421
423 # Misc descriptiony thing 422 # Misc descriptiony thing
424 renderer = gtk.CellRendererText () 423 renderer = gtk.CellRendererText ()
425 col = gtk.TreeViewColumn (None, renderer, 424 col = gtk.TreeViewColumn (None, renderer,
426 text=BuildManagerModel.COL_DESC) 425 text=BuildManagerModel.COL_DESC)
427 self.append_column (col) 426 self.append_column (col)
428 427
429 # Machine 428 # Machine
430 renderer = gtk.CellRendererText () 429 renderer = gtk.CellRendererText ()
431 col = gtk.TreeViewColumn ("Machine", renderer, 430 col = gtk.TreeViewColumn ("Machine", renderer,
432 text=BuildManagerModel.COL_MACHINE) 431 text=BuildManagerModel.COL_MACHINE)
433 self.append_column (col) 432 self.append_column (col)
434 433
435 # distro 434 # distro
436 renderer = gtk.CellRendererText () 435 renderer = gtk.CellRendererText ()
437 col = gtk.TreeViewColumn ("Distribution", renderer, 436 col = gtk.TreeViewColumn ("Distribution", renderer,
438 text=BuildManagerModel.COL_DISTRO) 437 text=BuildManagerModel.COL_DISTRO)
439 self.append_column (col) 438 self.append_column (col)
440 439
441 # date (using a custom function for formatting the cell contents it 440 # date (using a custom function for formatting the cell contents it
442 # takes epoch -> human readable string) 441 # takes epoch -> human readable string)
443 renderer = gtk.CellRendererText () 442 renderer = gtk.CellRendererText ()
444 col = gtk.TreeViewColumn ("Date", renderer, 443 col = gtk.TreeViewColumn ("Date", renderer,
445 text=BuildManagerModel.COL_DATE) 444 text=BuildManagerModel.COL_DATE)
446 self.append_column (col) 445 self.append_column (col)
447 col.set_cell_data_func (renderer, 446 col.set_cell_data_func (renderer,
448 self.date_format_custom_cell_data_func) 447 self.date_format_custom_cell_data_func)
449 448
450 # For status. 449 # For status.
@@ -454,4 +453,3 @@ class BuildManagerTreeView (gtk.TreeView):
454 self.append_column (col) 453 self.append_column (col)
455 col.set_cell_data_func (renderer, 454 col.set_cell_data_func (renderer,
456 self.state_format_custom_cell_data_fun) 455 self.state_format_custom_cell_data_fun)
457
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
index 18afd6674d..b4416ecbb3 100644
--- a/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py
@@ -24,7 +24,7 @@ import gobject
24class RunningBuildModel (gtk.TreeStore): 24class RunningBuildModel (gtk.TreeStore):
25 (COL_TYPE, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_ACTIVE) = (0, 1, 2, 3, 4, 5) 25 (COL_TYPE, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_ACTIVE) = (0, 1, 2, 3, 4, 5)
26 def __init__ (self): 26 def __init__ (self):
27 gtk.TreeStore.__init__ (self, 27 gtk.TreeStore.__init__ (self,
28 gobject.TYPE_STRING, 28 gobject.TYPE_STRING,
29 gobject.TYPE_STRING, 29 gobject.TYPE_STRING,
30 gobject.TYPE_STRING, 30 gobject.TYPE_STRING,
@@ -34,7 +34,7 @@ class RunningBuildModel (gtk.TreeStore):
34 34
35class RunningBuild (gobject.GObject): 35class RunningBuild (gobject.GObject):
36 __gsignals__ = { 36 __gsignals__ = {
37 'build-succeeded' : (gobject.SIGNAL_RUN_LAST, 37 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
38 gobject.TYPE_NONE, 38 gobject.TYPE_NONE,
39 ()), 39 ()),
40 'build-failed' : (gobject.SIGNAL_RUN_LAST, 40 'build-failed' : (gobject.SIGNAL_RUN_LAST,
@@ -63,7 +63,7 @@ class RunningBuild (gobject.GObject):
63 # for the message. 63 # for the message.
64 if hasattr(event, 'pid'): 64 if hasattr(event, 'pid'):
65 pid = event.pid 65 pid = event.pid
66 if self.pids_to_task.has_key(pid): 66 if pid in self.pids_to_task:
67 (package, task) = self.pids_to_task[pid] 67 (package, task) = self.pids_to_task[pid]
68 parent = self.tasks_to_iter[(package, task)] 68 parent = self.tasks_to_iter[(package, task)]
69 69
@@ -82,29 +82,29 @@ class RunningBuild (gobject.GObject):
82 82
83 # Add the message to the tree either at the top level if parent is 83 # Add the message to the tree either at the top level if parent is
84 # None otherwise as a descendent of a task. 84 # None otherwise as a descendent of a task.
85 self.model.append (parent, 85 self.model.append (parent,
86 (event.__name__.split()[-1], # e.g. MsgWarn, MsgError 86 (event.__name__.split()[-1], # e.g. MsgWarn, MsgError
87 package, 87 package,
88 task, 88 task,
89 event._message, 89 event._message,
90 icon, 90 icon,
91 False)) 91 False))
92 elif isinstance(event, bb.build.TaskStarted): 92 elif isinstance(event, bb.build.TaskStarted):
93 (package, task) = (event._package, event._task) 93 (package, task) = (event._package, event._task)
94 94
95 # Save out this PID. 95 # Save out this PID.
96 self.pids_to_task[pid] = (package,task) 96 self.pids_to_task[pid] = (package, task)
97 97
98 # Check if we already have this package in our model. If so then 98 # Check if we already have this package in our model. If so then
99 # that can be the parent for the task. Otherwise we create a new 99 # that can be the parent for the task. Otherwise we create a new
100 # top level for the package. 100 # top level for the package.
101 if (self.tasks_to_iter.has_key ((package, None))): 101 if ((package, None) in self.tasks_to_iter):
102 parent = self.tasks_to_iter[(package, None)] 102 parent = self.tasks_to_iter[(package, None)]
103 else: 103 else:
104 parent = self.model.append (None, (None, 104 parent = self.model.append (None, (None,
105 package, 105 package,
106 None, 106 None,
107 "Package: %s" % (package), 107 "Package: %s" % (package),
108 None, 108 None,
109 False)) 109 False))
110 self.tasks_to_iter[(package, None)] = parent 110 self.tasks_to_iter[(package, None)] = parent
@@ -114,10 +114,10 @@ class RunningBuild (gobject.GObject):
114 self.model.set(parent, self.model.COL_ICON, "gtk-execute") 114 self.model.set(parent, self.model.COL_ICON, "gtk-execute")
115 115
116 # Add an entry in the model for this task 116 # Add an entry in the model for this task
117 i = self.model.append (parent, (None, 117 i = self.model.append (parent, (None,
118 package, 118 package,
119 task, 119 task,
120 "Task: %s" % (task), 120 "Task: %s" % (task),
121 None, 121 None,
122 False)) 122 False))
123 123
@@ -176,5 +176,3 @@ class RunningBuildTreeView (gtk.TreeView):
176 renderer = gtk.CellRendererText () 176 renderer = gtk.CellRendererText ()
177 col = gtk.TreeViewColumn ("Message", renderer, text=3) 177 col = gtk.TreeViewColumn ("Message", renderer, text=3)
178 self.append_column (col) 178 self.append_column (col)
179
180
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
index cfa5b6564e..1cd58cac18 100644
--- a/bitbake/lib/bb/ui/depexp.py
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -201,14 +201,14 @@ def init(server, eventHandler):
201 try: 201 try:
202 cmdline = server.runCommand(["getCmdLineAction"]) 202 cmdline = server.runCommand(["getCmdLineAction"])
203 if not cmdline or cmdline[0] != "generateDotGraph": 203 if not cmdline or cmdline[0] != "generateDotGraph":
204 print "This UI is only compatible with the -g option" 204 print("This UI is only compatible with the -g option")
205 return 205 return
206 ret = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]]) 206 ret = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
207 if ret != True: 207 if ret != True:
208 print "Couldn't run command! %s" % ret 208 print("Couldn't run command! %s" % ret)
209 return 209 return
210 except xmlrpclib.Fault, x: 210 except xmlrpclib.Fault as x:
211 print "XMLRPC Fault getting commandline:\n %s" % x 211 print("XMLRPC Fault getting commandline:\n %s" % x)
212 return 212 return
213 213
214 shutdown = 0 214 shutdown = 0
@@ -233,8 +233,8 @@ def init(server, eventHandler):
233 x = event.sofar 233 x = event.sofar
234 y = event.total 234 y = event.total
235 if x == y: 235 if x == y:
236 print("\nParsing finished. %d cached, %d parsed, %d skipped, %d masked, %d errors." 236 print(("\nParsing finished. %d cached, %d parsed, %d skipped, %d masked, %d errors."
237 % ( event.cached, event.parsed, event.skipped, event.masked, event.errors)) 237 % ( event.cached, event.parsed, event.skipped, event.masked, event.errors)))
238 pbar.hide() 238 pbar.hide()
239 gtk.gdk.threads_enter() 239 gtk.gdk.threads_enter()
240 pbar.progress.set_fraction(float(x)/float(y)) 240 pbar.progress.set_fraction(float(x)/float(y))
@@ -250,7 +250,7 @@ def init(server, eventHandler):
250 if isinstance(event, bb.command.CookerCommandCompleted): 250 if isinstance(event, bb.command.CookerCommandCompleted):
251 continue 251 continue
252 if isinstance(event, bb.command.CookerCommandFailed): 252 if isinstance(event, bb.command.CookerCommandFailed):
253 print "Command execution failed: %s" % event.error 253 print("Command execution failed: %s" % event.error)
254 break 254 break
255 if isinstance(event, bb.cooker.CookerExit): 255 if isinstance(event, bb.cooker.CookerExit):
256 break 256 break
@@ -259,14 +259,13 @@ def init(server, eventHandler):
259 259
260 except KeyboardInterrupt: 260 except KeyboardInterrupt:
261 if shutdown == 2: 261 if shutdown == 2:
262 print "\nThird Keyboard Interrupt, exit.\n" 262 print("\nThird Keyboard Interrupt, exit.\n")
263 break 263 break
264 if shutdown == 1: 264 if shutdown == 1:
265 print "\nSecond Keyboard Interrupt, stopping...\n" 265 print("\nSecond Keyboard Interrupt, stopping...\n")
266 server.runCommand(["stateStop"]) 266 server.runCommand(["stateStop"])
267 if shutdown == 0: 267 if shutdown == 0:
268 print "\nKeyboard Interrupt, closing down...\n" 268 print("\nKeyboard Interrupt, closing down...\n")
269 server.runCommand(["stateShutdown"]) 269 server.runCommand(["stateShutdown"])
270 shutdown = shutdown + 1 270 shutdown = shutdown + 1
271 pass 271 pass
272
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
index 94995d82db..2cfa002f8a 100644
--- a/bitbake/lib/bb/ui/goggle.py
+++ b/bitbake/lib/bb/ui/goggle.py
@@ -25,13 +25,13 @@ from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
25 25
26def event_handle_idle_func (eventHandler, build): 26def event_handle_idle_func (eventHandler, build):
27 27
28 # Consume as many messages as we can in the time available to us 28 # Consume as many messages as we can in the time available to us
29 event = eventHandler.getEvent() 29 event = eventHandler.getEvent()
30 while event: 30 while event:
31 build.handle_event (event) 31 build.handle_event (event)
32 event = eventHandler.getEvent() 32 event = eventHandler.getEvent()
33 33
34 return True 34 return True
35 35
36class MainWindow (gtk.Window): 36class MainWindow (gtk.Window):
37 def __init__ (self): 37 def __init__ (self):
@@ -55,15 +55,15 @@ def init (server, eventHandler):
55 window.cur_build_tv.set_model (running_build.model) 55 window.cur_build_tv.set_model (running_build.model)
56 try: 56 try:
57 cmdline = server.runCommand(["getCmdLineAction"]) 57 cmdline = server.runCommand(["getCmdLineAction"])
58 print cmdline 58 print(cmdline)
59 if not cmdline: 59 if not cmdline:
60 return 1 60 return 1
61 ret = server.runCommand(cmdline) 61 ret = server.runCommand(cmdline)
62 if ret != True: 62 if ret != True:
63 print "Couldn't get default commandline! %s" % ret 63 print("Couldn't get default commandline! %s" % ret)
64 return 1 64 return 1
65 except xmlrpclib.Fault, x: 65 except xmlrpclib.Fault as x:
66 print "XMLRPC Fault getting commandline:\n %s" % x 66 print("XMLRPC Fault getting commandline:\n %s" % x)
67 return 1 67 return 1
68 68
69 # Use a timeout function for probing the event queue to find out if we 69 # Use a timeout function for probing the event queue to find out if we
@@ -74,4 +74,3 @@ def init (server, eventHandler):
74 running_build) 74 running_build)
75 75
76 gtk.main() 76 gtk.main()
77
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index ed26bb2b4c..f81759abf8 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -18,8 +18,9 @@
18# with this program; if not, write to the Free Software Foundation, Inc., 18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20 20
21import os 21from __future__ import division
22 22
23import os
23import sys 24import sys
24import itertools 25import itertools
25import xmlrpclib 26import xmlrpclib
@@ -44,10 +45,10 @@ def init(server, eventHandler):
44 return 1 45 return 1
45 ret = server.runCommand(cmdline) 46 ret = server.runCommand(cmdline)
46 if ret != True: 47 if ret != True:
47 print "Couldn't get default commandline! %s" % ret 48 print("Couldn't get default commandline! %s" % ret)
48 return 1 49 return 1
49 except xmlrpclib.Fault, x: 50 except xmlrpclib.Fault as x:
50 print "XMLRPC Fault getting commandline:\n %s" % x 51 print("XMLRPC Fault getting commandline:\n %s" % x)
51 return 1 52 return 1
52 53
53 shutdown = 0 54 shutdown = 0
@@ -65,39 +66,39 @@ def init(server, eventHandler):
65 if shutdown and helper.needUpdate: 66 if shutdown and helper.needUpdate:
66 activetasks, failedtasks = helper.getTasks() 67 activetasks, failedtasks = helper.getTasks()
67 if activetasks: 68 if activetasks:
68 print "Waiting for %s active tasks to finish:" % len(activetasks) 69 print("Waiting for %s active tasks to finish:" % len(activetasks))
69 tasknum = 1 70 tasknum = 1
70 for task in activetasks: 71 for task in activetasks:
71 print "%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task) 72 print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task))
72 tasknum = tasknum + 1 73 tasknum = tasknum + 1
73 74
74 if isinstance(event, bb.msg.MsgPlain): 75 if isinstance(event, bb.msg.MsgPlain):
75 print event._message 76 print(event._message)
76 continue 77 continue
77 if isinstance(event, bb.msg.MsgDebug): 78 if isinstance(event, bb.msg.MsgDebug):
78 print 'DEBUG: ' + event._message 79 print('DEBUG: ' + event._message)
79 continue 80 continue
80 if isinstance(event, bb.msg.MsgNote): 81 if isinstance(event, bb.msg.MsgNote):
81 print 'NOTE: ' + event._message 82 print('NOTE: ' + event._message)
82 continue 83 continue
83 if isinstance(event, bb.msg.MsgWarn): 84 if isinstance(event, bb.msg.MsgWarn):
84 print 'WARNING: ' + event._message 85 print('WARNING: ' + event._message)
85 continue 86 continue
86 if isinstance(event, bb.msg.MsgError): 87 if isinstance(event, bb.msg.MsgError):
87 return_value = 1 88 return_value = 1
88 print 'ERROR: ' + event._message 89 print('ERROR: ' + event._message)
89 continue 90 continue
90 if isinstance(event, bb.msg.MsgFatal): 91 if isinstance(event, bb.msg.MsgFatal):
91 return_value = 1 92 return_value = 1
92 print 'FATAL: ' + event._message 93 print('FATAL: ' + event._message)
93 break 94 break
94 if isinstance(event, bb.build.TaskFailed): 95 if isinstance(event, bb.build.TaskFailed):
95 return_value = 1 96 return_value = 1
96 logfile = event.logfile 97 logfile = event.logfile
97 if logfile: 98 if logfile and os.path.exists(logfile):
98 print "ERROR: Logfile of failure stored in: %s" % logfile 99 print("ERROR: Logfile of failure stored in: %s" % logfile)
99 if 1 or includelogs: 100 if 1 or includelogs:
100 print "Log data follows:" 101 print("Log data follows:")
101 f = open(logfile, "r") 102 f = open(logfile, "r")
102 lines = [] 103 lines = []
103 while True: 104 while True:
@@ -110,19 +111,19 @@ def init(server, eventHandler):
110 if len(lines) > int(loglines): 111 if len(lines) > int(loglines):
111 lines.pop(0) 112 lines.pop(0)
112 else: 113 else:
113 print '| %s' % l 114 print('| %s' % l)
114 f.close() 115 f.close()
115 if lines: 116 if lines:
116 for line in lines: 117 for line in lines:
117 print line 118 print(line)
118 if isinstance(event, bb.build.TaskBase): 119 if isinstance(event, bb.build.TaskBase):
119 print "NOTE: %s" % event._message 120 print("NOTE: %s" % event._message)
120 continue 121 continue
121 if isinstance(event, bb.event.ParseProgress): 122 if isinstance(event, bb.event.ParseProgress):
122 x = event.sofar 123 x = event.sofar
123 y = event.total 124 y = event.total
124 if os.isatty(sys.stdout.fileno()): 125 if os.isatty(sys.stdout.fileno()):
125 sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) ) 126 sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( next(parsespin), x, y, x*100//y ) )
126 sys.stdout.flush() 127 sys.stdout.flush()
127 else: 128 else:
128 if x == 1: 129 if x == 1:
@@ -132,8 +133,8 @@ def init(server, eventHandler):
132 sys.stdout.write("done.") 133 sys.stdout.write("done.")
133 sys.stdout.flush() 134 sys.stdout.flush()
134 if x == y: 135 if x == y:
135 print("\nParsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors." 136 print(("\nParsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
136 % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)) 137 % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
137 continue 138 continue
138 139
139 if isinstance(event, bb.command.CookerCommandCompleted): 140 if isinstance(event, bb.command.CookerCommandCompleted):
@@ -143,39 +144,48 @@ def init(server, eventHandler):
143 continue 144 continue
144 if isinstance(event, bb.command.CookerCommandFailed): 145 if isinstance(event, bb.command.CookerCommandFailed):
145 return_value = 1 146 return_value = 1
146 print "Command execution failed: %s" % event.error 147 print("Command execution failed: %s" % event.error)
147 break 148 break
148 if isinstance(event, bb.cooker.CookerExit): 149 if isinstance(event, bb.cooker.CookerExit):
149 break 150 break
150
151 # ignore
152 if isinstance(event, bb.event.BuildStarted):
153 continue
154 if isinstance(event, bb.event.BuildCompleted):
155 continue
156 if isinstance(event, bb.event.MultipleProviders): 151 if isinstance(event, bb.event.MultipleProviders):
152 print("NOTE: multiple providers are available for %s%s (%s)" % (event._is_runtime and "runtime " or "",
153 event._item,
154 ", ".join(event._candidates)))
155 print("NOTE: consider defining a PREFERRED_PROVIDER entry to match %s" % event._item)
156 continue
157 if isinstance(event, bb.event.NoProvider):
158 if event._runtime:
159 r = "R"
160 else:
161 r = ""
162
163 if event._dependees:
164 print("ERROR: Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r))
165 else:
166 print("ERROR: Nothing %sPROVIDES '%s'" % (r, event._item))
157 continue 167 continue
158 if isinstance(event, bb.runqueue.runQueueEvent): 168
159 continue 169 # ignore
160 if isinstance(event, bb.runqueue.runQueueExitWait): 170 if isinstance(event, (bb.event.BuildBase,
161 continue 171 bb.event.StampUpdate,
162 if isinstance(event, bb.event.StampUpdate): 172 bb.event.ConfigParsed,
163 continue 173 bb.event.RecipeParsed,
164 if isinstance(event, bb.event.ConfigParsed): 174 bb.runqueue.runQueueEvent,
165 continue 175 bb.runqueue.runQueueExitWait)):
166 if isinstance(event, bb.event.RecipeParsed):
167 continue 176 continue
168 print "Unknown Event: %s" % event 177
178 print("Unknown Event: %s" % event)
169 179
170 except KeyboardInterrupt: 180 except KeyboardInterrupt:
171 if shutdown == 2: 181 if shutdown == 2:
172 print "\nThird Keyboard Interrupt, exit.\n" 182 print("\nThird Keyboard Interrupt, exit.\n")
173 break 183 break
174 if shutdown == 1: 184 if shutdown == 1:
175 print "\nSecond Keyboard Interrupt, stopping...\n" 185 print("\nSecond Keyboard Interrupt, stopping...\n")
176 server.runCommand(["stateStop"]) 186 server.runCommand(["stateStop"])
177 if shutdown == 0: 187 if shutdown == 0:
178 print "\nKeyboard Interrupt, closing down...\n" 188 print("\nKeyboard Interrupt, closing down...\n")
179 server.runCommand(["stateShutdown"]) 189 server.runCommand(["stateShutdown"])
180 shutdown = shutdown + 1 190 shutdown = shutdown + 1
181 pass 191 pass
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index 14310dc124..3fed4c58a8 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -44,6 +44,8 @@
44 44
45""" 45"""
46 46
47from __future__ import division
48
47import os, sys, curses, itertools, time 49import os, sys, curses, itertools, time
48import bb 50import bb
49import xmlrpclib 51import xmlrpclib
@@ -136,7 +138,7 @@ class NCursesUI:
136 """Thread Activity Window""" 138 """Thread Activity Window"""
137 def __init__( self, x, y, width, height ): 139 def __init__( self, x, y, width, height ):
138 NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height ) 140 NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
139 141
140 def setStatus( self, thread, text ): 142 def setStatus( self, thread, text ):
141 line = "%02d: %s" % ( thread, text ) 143 line = "%02d: %s" % ( thread, text )
142 width = self.dimensions[WIDTH] 144 width = self.dimensions[WIDTH]
@@ -199,8 +201,8 @@ class NCursesUI:
199 201
200 main_left = 0 202 main_left = 0
201 main_top = 0 203 main_top = 0
202 main_height = ( height / 3 * 2 ) 204 main_height = ( height // 3 * 2 )
203 main_width = ( width / 3 ) * 2 205 main_width = ( width // 3 ) * 2
204 clo_left = main_left 206 clo_left = main_left
205 clo_top = main_top + main_height 207 clo_top = main_top + main_height
206 clo_height = height - main_height - main_top - 1 208 clo_height = height - main_height - main_top - 1
@@ -225,17 +227,17 @@ class NCursesUI:
225 227
226 helper = uihelper.BBUIHelper() 228 helper = uihelper.BBUIHelper()
227 shutdown = 0 229 shutdown = 0
228 230
229 try: 231 try:
230 cmdline = server.runCommand(["getCmdLineAction"]) 232 cmdline = server.runCommand(["getCmdLineAction"])
231 if not cmdline: 233 if not cmdline:
232 return 234 return
233 ret = server.runCommand(cmdline) 235 ret = server.runCommand(cmdline)
234 if ret != True: 236 if ret != True:
235 print "Couldn't get default commandlind! %s" % ret 237 print("Couldn't get default commandlind! %s" % ret)
236 return 238 return
237 except xmlrpclib.Fault, x: 239 except xmlrpclib.Fault as x:
238 print "XMLRPC Fault getting commandline:\n %s" % x 240 print("XMLRPC Fault getting commandline:\n %s" % x)
239 return 241 return
240 242
241 exitflag = False 243 exitflag = False
@@ -246,7 +248,7 @@ class NCursesUI:
246 continue 248 continue
247 helper.eventHandler(event) 249 helper.eventHandler(event)
248 #mw.appendText("%s\n" % event[0]) 250 #mw.appendText("%s\n" % event[0])
249 if isinstance(event, bb.build.Task): 251 if isinstance(event, bb.build.TaskBase):
250 mw.appendText("NOTE: %s\n" % event._message) 252 mw.appendText("NOTE: %s\n" % event._message)
251 if isinstance(event, bb.msg.MsgDebug): 253 if isinstance(event, bb.msg.MsgDebug):
252 mw.appendText('DEBUG: ' + event._message + '\n') 254 mw.appendText('DEBUG: ' + event._message + '\n')
@@ -263,10 +265,10 @@ class NCursesUI:
263 y = event.total 265 y = event.total
264 if x == y: 266 if x == y:
265 mw.setStatus("Idle") 267 mw.setStatus("Idle")
266 mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked." 268 mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked."
267 % ( event.cached, event.parsed, event.skipped, event.masked )) 269 % ( event.cached, event.parsed, event.skipped, event.masked ))
268 else: 270 else:
269 mw.setStatus("Parsing: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) ) 271 mw.setStatus("Parsing: %s (%04d/%04d) [%2d %%]" % ( next(parsespin), x, y, x*100//y ) )
270# if isinstance(event, bb.build.TaskFailed): 272# if isinstance(event, bb.build.TaskFailed):
271# if event.logfile: 273# if event.logfile:
272# if data.getVar("BBINCLUDELOGS", d): 274# if data.getVar("BBINCLUDELOGS", d):
@@ -301,12 +303,12 @@ class NCursesUI:
301 taw.setText(0, 0, "") 303 taw.setText(0, 0, "")
302 if activetasks: 304 if activetasks:
303 taw.appendText("Active Tasks:\n") 305 taw.appendText("Active Tasks:\n")
304 for task in activetasks: 306 for task in activetasks.itervalues():
305 taw.appendText(task) 307 taw.appendText(task["title"])
306 if failedtasks: 308 if failedtasks:
307 taw.appendText("Failed Tasks:\n") 309 taw.appendText("Failed Tasks:\n")
308 for task in failedtasks: 310 for task in failedtasks:
309 taw.appendText(task) 311 taw.appendText(task["title"])
310 312
311 curses.doupdate() 313 curses.doupdate()
312 except KeyboardInterrupt: 314 except KeyboardInterrupt:
@@ -324,7 +326,7 @@ class NCursesUI:
324 326
325def init(server, eventHandler): 327def init(server, eventHandler):
326 if not os.isatty(sys.stdout.fileno()): 328 if not os.isatty(sys.stdout.fileno()):
327 print "FATAL: Unable to run 'ncurses' UI without a TTY." 329 print("FATAL: Unable to run 'ncurses' UI without a TTY.")
328 return 330 return
329 ui = NCursesUI() 331 ui = NCursesUI()
330 try: 332 try:
@@ -332,4 +334,3 @@ def init(server, eventHandler):
332 except: 334 except:
333 import traceback 335 import traceback
334 traceback.print_exc() 336 traceback.print_exc()
335
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
index 713aa1f4a6..a627fc803f 100644
--- a/bitbake/lib/bb/ui/puccho.py
+++ b/bitbake/lib/bb/ui/puccho.py
@@ -24,6 +24,7 @@ import gtk.glade
24import threading 24import threading
25import urllib2 25import urllib2
26import os 26import os
27import contextlib
27 28
28from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration 29from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
29from bb.ui.crumbs.buildmanager import BuildManagerTreeView 30from bb.ui.crumbs.buildmanager import BuildManagerTreeView
@@ -38,7 +39,7 @@ class MetaDataLoader(gobject.GObject):
38 on what machines are available. The distribution and images available for 39 on what machines are available. The distribution and images available for
39 the machine and the uris to use for building the given machine.""" 40 the machine and the uris to use for building the given machine."""
40 __gsignals__ = { 41 __gsignals__ = {
41 'success' : (gobject.SIGNAL_RUN_LAST, 42 'success' : (gobject.SIGNAL_RUN_LAST,
42 gobject.TYPE_NONE, 43 gobject.TYPE_NONE,
43 ()), 44 ()),
44 'error' : (gobject.SIGNAL_RUN_LAST, 45 'error' : (gobject.SIGNAL_RUN_LAST,
@@ -77,20 +78,19 @@ class MetaDataLoader(gobject.GObject):
77 def run (self): 78 def run (self):
78 result = {} 79 result = {}
79 try: 80 try:
80 f = urllib2.urlopen (self.url) 81 with contextlib.closing (urllib2.urlopen (self.url)) as f:
81 82 # Parse the metadata format. The format is....
82 # Parse the metadata format. The format is.... 83 # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
83 # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|... 84 for line in f:
84 for line in f.readlines(): 85 components = line.split(";")
85 components = line.split(";") 86 if (len (components) < 4):
86 if (len (components) < 4): 87 raise MetaDataLoader.LoaderThread.LoaderImportException
87 raise MetaDataLoader.LoaderThread.LoaderImportException 88 machine = components[0]
88 machine = components[0] 89 distros = components[1].split("|")
89 distros = components[1].split("|") 90 images = components[2].split("|")
90 images = components[2].split("|") 91 urls = components[3].split("|")
91 urls = components[3].split("|") 92
92 93 result[machine] = (distros, images, urls)
93 result[machine] = (distros, images, urls)
94 94
95 # Create an object representing this *potential* 95 # Create an object representing this *potential*
96 # configuration. It can become concrete if the machine, distro 96 # configuration. It can become concrete if the machine, distro
@@ -104,13 +104,13 @@ class MetaDataLoader(gobject.GObject):
104 gobject.idle_add (MetaDataLoader.emit_success_signal, 104 gobject.idle_add (MetaDataLoader.emit_success_signal,
105 self.loader) 105 self.loader)
106 106
107 except MetaDataLoader.LoaderThread.LoaderImportException, e: 107 except MetaDataLoader.LoaderThread.LoaderImportException as e:
108 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader, 108 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
109 "Repository metadata corrupt") 109 "Repository metadata corrupt")
110 except Exception, e: 110 except Exception as e:
111 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader, 111 gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
112 "Unable to download repository metadata") 112 "Unable to download repository metadata")
113 print e 113 print(e)
114 114
115 def try_fetch_from_url (self, url): 115 def try_fetch_from_url (self, url):
116 # Try and download the metadata. Firing a signal if successful 116 # Try and download the metadata. Firing a signal if successful
@@ -211,7 +211,7 @@ class BuildSetupDialog (gtk.Dialog):
211 # Build 211 # Build
212 button = gtk.Button ("_Build", None, True) 212 button = gtk.Button ("_Build", None, True)
213 image = gtk.Image () 213 image = gtk.Image ()
214 image.set_from_stock (gtk.STOCK_EXECUTE,gtk.ICON_SIZE_BUTTON) 214 image.set_from_stock (gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
215 button.set_image (image) 215 button.set_image (image)
216 self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD) 216 self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
217 button.show_all () 217 button.show_all ()
@@ -293,7 +293,7 @@ class BuildSetupDialog (gtk.Dialog):
293 if (active_iter): 293 if (active_iter):
294 self.configuration.machine = model.get(active_iter, 0)[0] 294 self.configuration.machine = model.get(active_iter, 0)[0]
295 295
296 # Extract the chosen distro from the combo 296 # Extract the chosen distro from the combo
297 model = self.distribution_combo.get_model() 297 model = self.distribution_combo.get_model()
298 active_iter = self.distribution_combo.get_active_iter() 298 active_iter = self.distribution_combo.get_active_iter()
299 if (active_iter): 299 if (active_iter):
@@ -311,62 +311,62 @@ class BuildSetupDialog (gtk.Dialog):
311# 311#
312# TODO: Should be a method on the RunningBuild class 312# TODO: Should be a method on the RunningBuild class
313def event_handle_timeout (eventHandler, build): 313def event_handle_timeout (eventHandler, build):
314 # Consume as many messages as we can ... 314 # Consume as many messages as we can ...
315 event = eventHandler.getEvent() 315 event = eventHandler.getEvent()
316 while event: 316 while event:
317 build.handle_event (event) 317 build.handle_event (event)
318 event = eventHandler.getEvent() 318 event = eventHandler.getEvent()
319 return True 319 return True
320 320
321class MainWindow (gtk.Window): 321class MainWindow (gtk.Window):
322 322
323 # Callback that gets fired when the user hits a button in the 323 # Callback that gets fired when the user hits a button in the
324 # BuildSetupDialog. 324 # BuildSetupDialog.
325 def build_dialog_box_response_cb (self, dialog, response_id): 325 def build_dialog_box_response_cb (self, dialog, response_id):
326 conf = None 326 conf = None
327 if (response_id == BuildSetupDialog.RESPONSE_BUILD): 327 if (response_id == BuildSetupDialog.RESPONSE_BUILD):
328 dialog.update_configuration() 328 dialog.update_configuration()
329 print dialog.configuration.machine, dialog.configuration.distro, \ 329 print(dialog.configuration.machine, dialog.configuration.distro, \
330 dialog.configuration.image 330 dialog.configuration.image)
331 conf = dialog.configuration 331 conf = dialog.configuration
332 332
333 dialog.destroy() 333 dialog.destroy()
334 334
335 if conf: 335 if conf:
336 self.manager.do_build (conf) 336 self.manager.do_build (conf)
337 337
338 def build_button_clicked_cb (self, button): 338 def build_button_clicked_cb (self, button):
339 dialog = BuildSetupDialog () 339 dialog = BuildSetupDialog ()
340 340
341 # For some unknown reason Dialog.run causes nice little deadlocks ... :-( 341 # For some unknown reason Dialog.run causes nice little deadlocks ... :-(
342 dialog.connect ("response", self.build_dialog_box_response_cb) 342 dialog.connect ("response", self.build_dialog_box_response_cb)
343 dialog.show() 343 dialog.show()
344 344
345 def __init__ (self): 345 def __init__ (self):
346 gtk.Window.__init__ (self) 346 gtk.Window.__init__ (self)
347 347
348 # Pull in *just* the main vbox from the Glade XML data and then pack 348 # Pull in *just* the main vbox from the Glade XML data and then pack
349 # that inside the window 349 # that inside the window
350 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade", 350 gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
351 root = "main_window_vbox") 351 root = "main_window_vbox")
352 vbox = gxml.get_widget ("main_window_vbox") 352 vbox = gxml.get_widget ("main_window_vbox")
353 self.add (vbox) 353 self.add (vbox)
354 354
355 # Create the tree views for the build manager view and the progress view 355 # Create the tree views for the build manager view and the progress view
356 self.build_manager_view = BuildManagerTreeView() 356 self.build_manager_view = BuildManagerTreeView()
357 self.running_build_view = RunningBuildTreeView() 357 self.running_build_view = RunningBuildTreeView()
358 358
359 # Grab the scrolled windows that we put the tree views into 359 # Grab the scrolled windows that we put the tree views into
360 self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow") 360 self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
361 self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow") 361 self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
362 362
363 # Put the tree views inside ... 363 # Put the tree views inside ...
364 self.results_scrolledwindow.add (self.build_manager_view) 364 self.results_scrolledwindow.add (self.build_manager_view)
365 self.progress_scrolledwindow.add (self.running_build_view) 365 self.progress_scrolledwindow.add (self.running_build_view)
366 366
367 # Hook up the build button... 367 # Hook up the build button...
368 self.build_button = gxml.get_widget ("main_toolbutton_build") 368 self.build_button = gxml.get_widget ("main_toolbutton_build")
369 self.build_button.connect ("clicked", self.build_button_clicked_cb) 369 self.build_button.connect ("clicked", self.build_button_clicked_cb)
370 370
371# I'm not very happy about the current ownership of the RunningBuild. I have 371# I'm not very happy about the current ownership of the RunningBuild. I have
372# my suspicions that this object should be held by the BuildManager since we 372# my suspicions that this object should be held by the BuildManager since we
@@ -383,11 +383,11 @@ def running_build_succeeded_cb (running_build, manager):
383 # BuildManager. It can then hook onto the signals directly and drive 383 # BuildManager. It can then hook onto the signals directly and drive
384 # interesting things it cares about. 384 # interesting things it cares about.
385 manager.notify_build_succeeded () 385 manager.notify_build_succeeded ()
386 print "build succeeded" 386 print("build succeeded")
387 387
388def running_build_failed_cb (running_build, manager): 388def running_build_failed_cb (running_build, manager):
389 # As above 389 # As above
390 print "build failed" 390 print("build failed")
391 manager.notify_build_failed () 391 manager.notify_build_failed ()
392 392
393def init (server, eventHandler): 393def init (server, eventHandler):
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index 36302f4da7..f1e4d791ee 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -19,7 +19,7 @@
19 19
20 20
21""" 21"""
22Use this class to fork off a thread to receive event callbacks from the bitbake 22Use this class to fork off a thread to receive event callbacks from the bitbake
23server and queue them for the UI to process. This process must be used to avoid 23server and queue them for the UI to process. This process must be used to avoid
24client/server deadlocks. 24client/server deadlocks.
25""" 25"""
@@ -110,16 +110,15 @@ class UIXMLRPCServer (SimpleXMLRPCServer):
110 return (sock, addr) 110 return (sock, addr)
111 except socket.timeout: 111 except socket.timeout:
112 pass 112 pass
113 return (None,None) 113 return (None, None)
114 114
115 def close_request(self, request): 115 def close_request(self, request):
116 if request is None: 116 if request is None:
117 return 117 return
118 SimpleXMLRPCServer.close_request(self, request) 118 SimpleXMLRPCServer.close_request(self, request)
119 119
120 def process_request(self, request, client_address): 120 def process_request(self, request, client_address):
121 if request is None: 121 if request is None:
122 return 122 return
123 SimpleXMLRPCServer.process_request(self, request, client_address) 123 SimpleXMLRPCServer.process_request(self, request, client_address)
124 124
125
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 86b9c724ed..c0cc9c6ea2 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -19,10 +19,22 @@ BitBake Utility Functions
19# with this program; if not, write to the Free Software Foundation, Inc., 19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import bb
25import errno
26import bb.msg
27from commands import getstatusoutput
28
29# Version comparison
22separators = ".-" 30separators = ".-"
23 31
24import re, fcntl, os, types, bb, string, stat, shutil 32# Context used in better_exec, eval
25from commands import getstatusoutput 33_context = {
34 "os": os,
35 "bb": bb,
36 "time": time,
37}
26 38
27def explode_version(s): 39def explode_version(s):
28 r = [] 40 r = []
@@ -60,9 +72,9 @@ def vercmp_part(a, b):
60 if ca == None and cb == None: 72 if ca == None and cb == None:
61 return 0 73 return 0
62 74
63 if type(ca) is types.StringType: 75 if isinstance(ca, basestring):
64 sa = ca in separators 76 sa = ca in separators
65 if type(cb) is types.StringType: 77 if isinstance(cb, basestring):
66 sb = cb in separators 78 sb = cb in separators
67 if sa and not sb: 79 if sa and not sb:
68 return -1 80 return -1
@@ -85,6 +97,131 @@ def vercmp(ta, tb):
85 r = vercmp_part(ra, rb) 97 r = vercmp_part(ra, rb)
86 return r 98 return r
87 99
100_package_weights_ = {"pre":-2, "p":0, "alpha":-4, "beta":-3, "rc":-1} # dicts are unordered
101_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list
102
103def relparse(myver):
104 """Parses the last elements of a version number into a triplet, that can
105 later be compared.
106 """
107
108 number = 0
109 p1 = 0
110 p2 = 0
111 mynewver = myver.split('_')
112 if len(mynewver) == 2:
113 # an _package_weights_
114 number = float(mynewver[0])
115 match = 0
116 for x in _package_ends_:
117 elen = len(x)
118 if mynewver[1][:elen] == x:
119 match = 1
120 p1 = _package_weights_[x]
121 try:
122 p2 = float(mynewver[1][elen:])
123 except:
124 p2 = 0
125 break
126 if not match:
127 # normal number or number with letter at end
128 divider = len(myver)-1
129 if myver[divider:] not in "1234567890":
130 # letter at end
131 p1 = ord(myver[divider:])
132 number = float(myver[0:divider])
133 else:
134 number = float(myver)
135 else:
136 # normal number or number with letter at end
137 divider = len(myver)-1
138 if myver[divider:] not in "1234567890":
139 #letter at end
140 p1 = ord(myver[divider:])
141 number = float(myver[0:divider])
142 else:
143 number = float(myver)
144 return [number, p1, p2]
145
146__vercmp_cache__ = {}
147
148def vercmp_string(val1, val2):
149 """This takes two version strings and returns an integer to tell you whether
150 the versions are the same, val1>val2 or val2>val1.
151 """
152
153 # quick short-circuit
154 if val1 == val2:
155 return 0
156 valkey = val1 + " " + val2
157
158 # cache lookup
159 try:
160 return __vercmp_cache__[valkey]
161 try:
162 return - __vercmp_cache__[val2 + " " + val1]
163 except KeyError:
164 pass
165 except KeyError:
166 pass
167
168 # consider 1_p2 vc 1.1
169 # after expansion will become (1_p2,0) vc (1,1)
170 # then 1_p2 is compared with 1 before 0 is compared with 1
171 # to solve the bug we need to convert it to (1,0_p2)
172 # by splitting _prepart part and adding it back _after_expansion
173
174 val1_prepart = val2_prepart = ''
175 if val1.count('_'):
176 val1, val1_prepart = val1.split('_', 1)
177 if val2.count('_'):
178 val2, val2_prepart = val2.split('_', 1)
179
180 # replace '-' by '.'
181 # FIXME: Is it needed? can val1/2 contain '-'?
182
183 val1 = val1.split("-")
184 if len(val1) == 2:
185 val1[0] = val1[0] + "." + val1[1]
186 val2 = val2.split("-")
187 if len(val2) == 2:
188 val2[0] = val2[0] + "." + val2[1]
189
190 val1 = val1[0].split('.')
191 val2 = val2[0].split('.')
192
193 # add back decimal point so that .03 does not become "3" !
194 for x in range(1, len(val1)):
195 if val1[x][0] == '0' :
196 val1[x] = '.' + val1[x]
197 for x in range(1, len(val2)):
198 if val2[x][0] == '0' :
199 val2[x] = '.' + val2[x]
200
201 # extend version numbers
202 if len(val2) < len(val1):
203 val2.extend(["0"]*(len(val1)-len(val2)))
204 elif len(val1) < len(val2):
205 val1.extend(["0"]*(len(val2)-len(val1)))
206
207 # add back _prepart tails
208 if val1_prepart:
209 val1[-1] += '_' + val1_prepart
210 if val2_prepart:
211 val2[-1] += '_' + val2_prepart
212 # The above code will extend version numbers out so they
213 # have the same number of digits.
214 for x in range(0, len(val1)):
215 cmp1 = relparse(val1[x])
216 cmp2 = relparse(val2[x])
217 for y in range(0, 3):
218 myret = cmp1[y] - cmp2[y]
219 if myret != 0:
220 __vercmp_cache__[valkey] = myret
221 return myret
222 __vercmp_cache__[valkey] = 0
223 return 0
224
88def explode_deps(s): 225def explode_deps(s):
89 """ 226 """
90 Take an RDEPENDS style string of format: 227 Take an RDEPENDS style string of format:
@@ -154,26 +291,22 @@ def _print_trace(body, line):
154 """ 291 """
155 Print the Environment of a Text Body 292 Print the Environment of a Text Body
156 """ 293 """
157 import bb
158
159 # print the environment of the method 294 # print the environment of the method
160 bb.msg.error(bb.msg.domain.Util, "Printing the environment of the function") 295 bb.msg.error(bb.msg.domain.Util, "Printing the environment of the function")
161 min_line = max(1,line-4) 296 min_line = max(1, line-4)
162 max_line = min(line+4,len(body)-1) 297 max_line = min(line + 4, len(body)-1)
163 for i in range(min_line,max_line+1): 298 for i in range(min_line, max_line + 1):
164 bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (i, body[i-1]) ) 299 bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (i, body[i-1]) )
165 300
166 301
167def better_compile(text, file, realfile): 302def better_compile(text, file, realfile, mode = "exec"):
168 """ 303 """
169 A better compile method. This method 304 A better compile method. This method
170 will print the offending lines. 305 will print the offending lines.
171 """ 306 """
172 try: 307 try:
173 return compile(text, file, "exec") 308 return compile(text, file, mode)
174 except Exception, e: 309 except Exception as e:
175 import bb,sys
176
177 # split the text into lines again 310 # split the text into lines again
178 body = text.split('\n') 311 body = text.split('\n')
179 bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: ", realfile) 312 bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: ", realfile)
@@ -191,18 +324,18 @@ def better_exec(code, context, text, realfile):
191 print the lines that are responsible for the 324 print the lines that are responsible for the
192 error. 325 error.
193 """ 326 """
194 import bb,sys 327 import bb.parse
195 try: 328 try:
196 exec code in context 329 exec(code, _context, context)
197 except: 330 except:
198 (t,value,tb) = sys.exc_info() 331 (t, value, tb) = sys.exc_info()
199 332
200 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]: 333 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
201 raise 334 raise
202 335
203 # print the Header of the Error Message 336 # print the Header of the Error Message
204 bb.msg.error(bb.msg.domain.Util, "Error in executing python function in: %s" % realfile) 337 bb.msg.error(bb.msg.domain.Util, "Error in executing python function in: %s" % realfile)
205 bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) ) 338 bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t, value))
206 339
207 # let us find the line number now 340 # let us find the line number now
208 while tb.tb_next: 341 while tb.tb_next:
@@ -212,48 +345,14 @@ def better_exec(code, context, text, realfile):
212 line = traceback.tb_lineno(tb) 345 line = traceback.tb_lineno(tb)
213 346
214 _print_trace( text.split('\n'), line ) 347 _print_trace( text.split('\n'), line )
215 348
216 raise 349 raise
217 350
218def Enum(*names): 351def simple_exec(code, context):
219 """ 352 exec(code, _context, context)
220 A simple class to give Enum support 353
221 """ 354def better_eval(source, locals):
222 355 return eval(source, _context, locals)
223 assert names, "Empty enums are not supported"
224
225 class EnumClass(object):
226 __slots__ = names
227 def __iter__(self): return iter(constants)
228 def __len__(self): return len(constants)
229 def __getitem__(self, i): return constants[i]
230 def __repr__(self): return 'Enum' + str(names)
231 def __str__(self): return 'enum ' + str(constants)
232
233 class EnumValue(object):
234 __slots__ = ('__value')
235 def __init__(self, value): self.__value = value
236 Value = property(lambda self: self.__value)
237 EnumType = property(lambda self: EnumType)
238 def __hash__(self): return hash(self.__value)
239 def __cmp__(self, other):
240 # C fans might want to remove the following assertion
241 # to make all enums comparable by ordinal value {;))
242 assert self.EnumType is other.EnumType, "Only values from the same enum are comparable"
243 return cmp(self.__value, other.__value)
244 def __invert__(self): return constants[maximum - self.__value]
245 def __nonzero__(self): return bool(self.__value)
246 def __repr__(self): return str(names[self.__value])
247
248 maximum = len(names) - 1
249 constants = [None] * len(names)
250 for i, each in enumerate(names):
251 val = EnumValue(i)
252 setattr(EnumClass, each, val)
253 constants[i] = val
254 constants = tuple(constants)
255 EnumType = EnumClass()
256 return EnumType
257 356
258def lockfile(name): 357def lockfile(name):
259 """ 358 """
@@ -262,37 +361,36 @@ def lockfile(name):
262 """ 361 """
263 path = os.path.dirname(name) 362 path = os.path.dirname(name)
264 if not os.path.isdir(path): 363 if not os.path.isdir(path):
265 import bb, sys
266 bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path) 364 bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path)
267 sys.exit(1) 365 sys.exit(1)
268 366
269 while True: 367 while True:
270 # If we leave the lockfiles lying around there is no problem 368 # If we leave the lockfiles lying around there is no problem
271 # but we should clean up after ourselves. This gives potential 369 # but we should clean up after ourselves. This gives potential
272 # for races though. To work around this, when we acquire the lock 370 # for races though. To work around this, when we acquire the lock
273 # we check the file we locked was still the lock file on disk. 371 # we check the file we locked was still the lock file on disk.
274 # by comparing inode numbers. If they don't match or the lockfile 372 # by comparing inode numbers. If they don't match or the lockfile
275 # no longer exists, we start again. 373 # no longer exists, we start again.
276 374
277 # This implementation is unfair since the last person to request the 375 # This implementation is unfair since the last person to request the
278 # lock is the most likely to win it. 376 # lock is the most likely to win it.
279 377
280 try: 378 try:
281 lf = open(name, "a+") 379 lf = open(name, "a + ")
282 fcntl.flock(lf.fileno(), fcntl.LOCK_EX) 380 fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
283 statinfo = os.fstat(lf.fileno()) 381 statinfo = os.fstat(lf.fileno())
284 if os.path.exists(lf.name): 382 if os.path.exists(lf.name):
285 statinfo2 = os.stat(lf.name) 383 statinfo2 = os.stat(lf.name)
286 if statinfo.st_ino == statinfo2.st_ino: 384 if statinfo.st_ino == statinfo2.st_ino:
287 return lf 385 return lf
288 # File no longer exists or changed, retry 386 # File no longer exists or changed, retry
289 lf.close 387 lf.close
290 except Exception, e: 388 except Exception as e:
291 continue 389 continue
292 390
293def unlockfile(lf): 391def unlockfile(lf):
294 """ 392 """
295 Unlock a file locked using lockfile() 393 Unlock a file locked using lockfile()
296 """ 394 """
297 os.unlink(lf.name) 395 os.unlink(lf.name)
298 fcntl.flock(lf.fileno(), fcntl.LOCK_UN) 396 fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
@@ -308,7 +406,7 @@ def md5_file(filename):
308 except ImportError: 406 except ImportError:
309 import md5 407 import md5
310 m = md5.new() 408 m = md5.new()
311 409
312 for line in open(filename): 410 for line in open(filename):
313 m.update(line) 411 m.update(line)
314 return m.hexdigest() 412 return m.hexdigest()
@@ -368,19 +466,17 @@ def filter_environment(good_vars):
368 are not known and may influence the build in a negative way. 466 are not known and may influence the build in a negative way.
369 """ 467 """
370 468
371 import bb
372
373 removed_vars = [] 469 removed_vars = []
374 for key in os.environ.keys(): 470 for key in os.environ.keys():
375 if key in good_vars: 471 if key in good_vars:
376 continue 472 continue
377 473
378 removed_vars.append(key) 474 removed_vars.append(key)
379 os.unsetenv(key) 475 os.unsetenv(key)
380 del os.environ[key] 476 del os.environ[key]
381 477
382 if len(removed_vars): 478 if len(removed_vars):
383 bb.debug(1, "Removed the following variables from the environment:", ",".join(removed_vars)) 479 bb.msg.debug(1, bb.msg.domain.Util, "Removed the following variables from the environment:", ",".join(removed_vars))
384 480
385 return removed_vars 481 return removed_vars
386 482
@@ -410,7 +506,7 @@ def build_environment(d):
410 """ 506 """
411 Build an environment from all exported variables. 507 Build an environment from all exported variables.
412 """ 508 """
413 import bb 509 import bb.data
414 for var in bb.data.keys(d): 510 for var in bb.data.keys(d):
415 export = bb.data.getVarFlag(var, "export", d) 511 export = bb.data.getVarFlag(var, "export", d)
416 if export: 512 if export:
@@ -419,7 +515,7 @@ def build_environment(d):
419def prunedir(topdir): 515def prunedir(topdir):
420 # Delete everything reachable from the directory named in 'topdir'. 516 # Delete everything reachable from the directory named in 'topdir'.
421 # CAUTION: This is dangerous! 517 # CAUTION: This is dangerous!
422 for root, dirs, files in os.walk(topdir, topdown=False): 518 for root, dirs, files in os.walk(topdir, topdown = False):
423 for name in files: 519 for name in files:
424 os.remove(os.path.join(root, name)) 520 os.remove(os.path.join(root, name))
425 for name in dirs: 521 for name in dirs:
@@ -434,7 +530,7 @@ def prunedir(topdir):
434# but thats possibly insane and suffixes is probably going to be small 530# but thats possibly insane and suffixes is probably going to be small
435# 531#
436def prune_suffix(var, suffixes, d): 532def prune_suffix(var, suffixes, d):
437 # See if var ends with any of the suffixes listed and 533 # See if var ends with any of the suffixes listed and
438 # remove it if found 534 # remove it if found
439 for suffix in suffixes: 535 for suffix in suffixes:
440 if var.endswith(suffix): 536 if var.endswith(suffix):
@@ -446,169 +542,167 @@ def mkdirhier(dir):
446 directory already exists like os.makedirs 542 directory already exists like os.makedirs
447 """ 543 """
448 544
449 bb.debug(3, "mkdirhier(%s)" % dir) 545 bb.msg.debug(3, bb.msg.domain.Util, "mkdirhier(%s)" % dir)
450 try: 546 try:
451 os.makedirs(dir) 547 os.makedirs(dir)
452 bb.debug(2, "created " + dir) 548 bb.msg.debug(2, bb.msg.domain.Util, "created " + dir)
453 except OSError, e: 549 except OSError as e:
454 if e.errno != 17: raise e 550 if e.errno != errno.EEXIST:
455 551 raise e
456import stat
457 552
def movefile(src, dest, newmtime = None, sstat = None):
    """
    Move a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  The move is atomic when src and dest are on the same
    device (plain os.rename); otherwise the data is copied to
    "dest#new" first and renamed into place so dest is never partial.

    src -- path of the file (or symlink) to move
    dest -- destination path
    newmtime -- mtime to stamp on dest; defaults to src's mtime
    sstat -- pre-computed os.lstat(src) result, to avoid a re-stat

    Returns the destination mtime on success, the lstat() of the new
    link when src was a symlink, and None on failure.
    """

    try:
        if not sstat:
            sstat = os.lstat(src)
    except OSError as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except OSError:
        # dest doesn't exist; stat its directory instead so the
        # same-device comparison below still has something to work with.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # A stale symlink at dest would make rename/copy behave
            # unpredictably; best-effort remove it first.
            try:
                os.unlink(dest)
                destexists = 0
            except OSError:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Recreate the symlink at dest rather than copying its target.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except OSError as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, dest)
            renamefailed = 0
        except OSError as e:
            # BUGFIX: exceptions are not subscriptable in Python 3 --
            # e[0] raised TypeError here; use e.errno instead.
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, dest + "#new")
                os.rename(dest + "#new", dest)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            # we don't yet handle special files, so fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
                os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
545 639
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copy a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when copying across
    filesystems.  The data is written to "dest#new" first and renamed
    into place so dest is never left partial.

    src -- path of the file (or symlink) to copy
    dest -- destination path
    newmtime -- mtime to stamp on dest; defaults to src's mtime
    sstat -- pre-computed os.lstat(src) result, to avoid a re-stat

    Returns the destination mtime on success, the lstat() of the new
    link when src was a symlink, and False on failure.
    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except OSError as e:
        print("copyfile: Stating source file failed...", e)
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except OSError:
        # dest doesn't exist yet; remember that and stat its directory.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Best-effort removal of a stale symlink at dest.
            try:
                os.unlink(dest)
                destexists = 0
            except OSError:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Recreate the symlink at dest rather than copying its target.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except OSError as e:
            print("copyfile: failed to properly create symlink:", dest, "->", target, e)
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try: # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            print('copyfile: copy', src, '->', dest, 'failed.', e)
            return False
    else:
        # we don't yet handle special files, so fall back to /bin/cp
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            print("copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a)
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        print("copyfile: Failed to chown/chmod/unlink", dest, e)
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
613 707
614def which(path, item, direction = 0): 708def which(path, item, direction = 0):