author     Richard Purdie <richard@openedhand.com>    2008-03-03 22:01:45 +0000
committer  Richard Purdie <richard@openedhand.com>    2008-03-03 22:01:45 +0000
commit     ab191d21e2e5e1609206146d238af6ec0b3f0554 (patch)
tree       728fa74dbf00f6b11964aa53b8427a0d221d6e91 /bitbake/lib
parent     e88b4753781d54dc2625c3260c611d30ad76dbed (diff)
download   poky-ab191d21e2e5e1609206146d238af6ec0b3f0554.tar.gz
bitbake: Update to bitbake 1.8 branch head
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@3892 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib')
-rw-r--r--  bitbake/lib/bb/__init__.py                 | 179
-rw-r--r--  bitbake/lib/bb/build.py                    | 208
-rw-r--r--  bitbake/lib/bb/cache.py                    |  63
-rw-r--r--  bitbake/lib/bb/cooker.py                   |  97
-rw-r--r--  bitbake/lib/bb/data_smart.py               |   4
-rw-r--r--  bitbake/lib/bb/event.py                    |  17
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py           |  12
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py |  51
-rw-r--r--  bitbake/lib/bb/runqueue.py                 | 142
-rw-r--r--  bitbake/lib/bb/shell.py                    |  23
-rw-r--r--  bitbake/lib/bb/taskdata.py                 |   5
-rw-r--r--  bitbake/lib/bb/utils.py                    |   8
12 files changed, 339 insertions(+), 470 deletions(-)
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index a126c17693..c452d529c1 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -46,7 +46,6 @@ __all__ = [
46 "pkgcmp", 46 "pkgcmp",
47 "dep_parenreduce", 47 "dep_parenreduce",
48 "dep_opconvert", 48 "dep_opconvert",
49 "digraph",
50 49
51# fetch 50# fetch
52 "decodeurl", 51 "decodeurl",
@@ -1128,184 +1127,6 @@ def dep_opconvert(mysplit, myuse):
1128 mypos += 1 1127 mypos += 1
1129 return newsplit 1128 return newsplit
1130 1129
1131class digraph:
1132 """beautiful directed graph object"""
1133
1134 def __init__(self):
1135 self.dict={}
1136 #okeys = keys, in order they were added (to optimize firstzero() ordering)
1137 self.okeys=[]
1138 self.__callback_cache=[]
1139
1140 def __str__(self):
1141 str = ""
1142 for key in self.okeys:
1143 str += "%s:\t%s\n" % (key, self.dict[key][1])
1144 return str
1145
1146 def addnode(self,mykey,myparent):
1147 if not mykey in self.dict:
1148 self.okeys.append(mykey)
1149 if myparent==None:
1150 self.dict[mykey]=[0,[]]
1151 else:
1152 self.dict[mykey]=[0,[myparent]]
1153 self.dict[myparent][0]=self.dict[myparent][0]+1
1154 return
1155 if myparent and (not myparent in self.dict[mykey][1]):
1156 self.dict[mykey][1].append(myparent)
1157 self.dict[myparent][0]=self.dict[myparent][0]+1
1158
1159 def delnode(self,mykey, ref = 1):
1160 """Delete a node
1161
1162 If ref is 1, remove references to this node from other nodes.
1163 If ref is 2, remove nodes that reference this node."""
1164 if not mykey in self.dict:
1165 return
1166 for x in self.dict[mykey][1]:
1167 self.dict[x][0]=self.dict[x][0]-1
1168 del self.dict[mykey]
1169 while 1:
1170 try:
1171 self.okeys.remove(mykey)
1172 except ValueError:
1173 break
1174 if ref:
1175 __kill = []
1176 for k in self.okeys:
1177 if mykey in self.dict[k][1]:
1178 if ref == 1 or ref == 2:
1179 self.dict[k][1].remove(mykey)
1180 if ref == 2:
1181 __kill.append(k)
1182 for l in __kill:
1183 self.delnode(l, ref)
1184
1185 def allnodes(self):
1186 "returns all nodes in the dictionary"
1187 keys = self.dict.keys()
1188 ret = []
1189 for key in keys:
1190 ret.append(key)
1191 ret.sort()
1192 return ret
1193
1194 def firstzero(self):
1195 "returns first node with zero references, or NULL if no such node exists"
1196 for x in self.okeys:
1197 if self.dict[x][0]==0:
1198 return x
1199 return None
1200
1201 def firstnonzero(self):
1202 "returns first node with nonzero references, or NULL if no such node exists"
1203 for x in self.okeys:
1204 if self.dict[x][0]!=0:
1205 return x
1206 return None
1207
1208
1209 def allzeros(self):
1210 "returns all nodes with zero references, or NULL if no such node exists"
1211 zerolist = []
1212 for x in self.dict.keys():
1213 if self.dict[x][0]==0:
1214 zerolist.append(x)
1215 return zerolist
1216
1217 def hasallzeros(self):
1218 "returns 0/1, Are all nodes zeros? 1 : 0"
1219 zerolist = []
1220 for x in self.dict.keys():
1221 if self.dict[x][0]!=0:
1222 return 0
1223 return 1
1224
1225 def empty(self):
1226 if len(self.dict)==0:
1227 return 1
1228 return 0
1229
1230 def hasnode(self,mynode):
1231 return mynode in self.dict
1232
1233 def getparents(self, item):
1234 if not self.hasnode(item):
1235 return []
1236 parents = self.dict[item][1]
1237 ret = []
1238 for parent in parents:
1239 ret.append(parent)
1240 ret.sort()
1241 return ret
1242
1243 def getchildren(self, item):
1244 if not self.hasnode(item):
1245 return []
1246 children = [i for i in self.okeys if item in self.getparents(i)]
1247 return children
1248
1249 def walkdown(self, item, callback, debug = None, usecache = False):
1250 if not self.hasnode(item):
1251 return 0
1252
1253 if usecache:
1254 if self.__callback_cache.count(item):
1255 if debug:
1256 print "hit cache for item: %s" % item
1257 return 1
1258
1259 parents = self.getparents(item)
1260 children = self.getchildren(item)
1261 for p in parents:
1262 if p in children:
1263# print "%s is both parent and child of %s" % (p, item)
1264 if usecache:
1265 self.__callback_cache.append(p)
1266 ret = callback(self, p)
1267 if ret == 0:
1268 return 0
1269 continue
1270 if item == p:
1271 print "eek, i'm my own parent!"
1272 return 0
1273 if debug:
1274 print "item: %s, p: %s" % (item, p)
1275 ret = self.walkdown(p, callback, debug, usecache)
1276 if ret == 0:
1277 return 0
1278 if usecache:
1279 self.__callback_cache.append(item)
1280 return callback(self, item)
1281
1282 def walkup(self, item, callback):
1283 if not self.hasnode(item):
1284 return 0
1285
1286 parents = self.getparents(item)
1287 children = self.getchildren(item)
1288 for c in children:
1289 if c in parents:
1290 ret = callback(self, item)
1291 if ret == 0:
1292 return 0
1293 continue
1294 if item == c:
1295 print "eek, i'm my own child!"
1296 return 0
1297 ret = self.walkup(c, callback)
1298 if ret == 0:
1299 return 0
1300 return callback(self, item)
1301
1302 def copy(self):
1303 mygraph=digraph()
1304 for x in self.dict.keys():
1305 mygraph.dict[x]=self.dict[x][:]
1306 mygraph.okeys=self.okeys[:]
1307 return mygraph
1308
1309if __name__ == "__main__": 1130if __name__ == "__main__":
1310 import doctest, bb 1131 import doctest, bb
1311 doctest.testmod(bb) 1132 doctest.testmod(bb)
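
The roughly 180 lines removed above drop the portage-derived digraph helper entirely; the build.py and taskdata.py hunks later in this commit carry the same information in a plain _task_deps dictionary instead. A minimal sketch of that flat shape, using hypothetical task names:

# Sketch (not taken from the tree) of the dictionary that replaces bb.digraph.
task_deps = {
    'tasks':   ['do_fetch', 'do_unpack', 'do_compile'],   # every task added for a recipe
    'parents': {                                          # what each task runs after
        'do_fetch':   [],
        'do_unpack':  ['do_fetch'],
        'do_compile': ['do_unpack'],
    },
    'nostamp': {},                                        # per-task flags collected by getTask()
}

# Dependency walks become plain dictionary lookups:
for parent in task_deps['parents']['do_compile']:
    print(parent)                                         # -> do_unpack
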
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 1c015fe9a3..25c03a0a4e 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -74,12 +74,21 @@ def exec_func(func, d, dirs = None):
74 if not body: 74 if not body:
75 return 75 return
76 76
77 cleandirs = (data.expand(data.getVarFlag(func, 'cleandirs', d), d) or "").split() 77 flags = data.getVarFlags(func, d)
78 for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
79 if not item in flags:
80 flags[item] = None
81
82 ispython = flags['python']
83
84 cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
78 for cdir in cleandirs: 85 for cdir in cleandirs:
79 os.system("rm -rf %s" % cdir) 86 os.system("rm -rf %s" % cdir)
80 87
81 if not dirs: 88 if dirs:
82 dirs = (data.expand(data.getVarFlag(func, 'dirs', d), d) or "").split() 89 dirs = data.expand(dirs, d)
90 else:
91 dirs = (data.expand(flags['dirs'], d) or "").split()
83 for adir in dirs: 92 for adir in dirs:
84 mkdirhier(adir) 93 mkdirhier(adir)
85 94
@@ -88,24 +97,22 @@ def exec_func(func, d, dirs = None):
88 else: 97 else:
89 adir = data.getVar('B', d, 1) 98 adir = data.getVar('B', d, 1)
90 99
91 adir = data.expand(adir, d)
92
93 try: 100 try:
94 prevdir = os.getcwd() 101 prevdir = os.getcwd()
95 except OSError: 102 except OSError:
96 prevdir = data.expand('${TOPDIR}', d) 103 prevdir = data.getVar('TOPDIR', d, True)
97 if adir and os.access(adir, os.F_OK): 104 if adir and os.access(adir, os.F_OK):
98 os.chdir(adir) 105 os.chdir(adir)
99 106
100 locks = [] 107 locks = []
101 lockfiles = (data.expand(data.getVarFlag(func, 'lockfiles', d), d) or "").split() 108 lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
102 for lock in lockfiles: 109 for lock in lockfiles:
103 locks.append(bb.utils.lockfile(lock)) 110 locks.append(bb.utils.lockfile(lock))
104 111
105 if data.getVarFlag(func, "python", d): 112 if flags['python']:
106 exec_func_python(func, d) 113 exec_func_python(func, d)
107 else: 114 else:
108 exec_func_shell(func, d) 115 exec_func_shell(func, d, flags)
109 116
110 for lock in locks: 117 for lock in locks:
111 bb.utils.unlockfile(lock) 118 bb.utils.unlockfile(lock)
@@ -117,19 +124,20 @@ def exec_func_python(func, d):
117 """Execute a python BB 'function'""" 124 """Execute a python BB 'function'"""
118 import re, os 125 import re, os
119 126
127 bbfile = bb.data.getVar('FILE', d, 1)
120 tmp = "def " + func + "():\n%s" % data.getVar(func, d) 128 tmp = "def " + func + "():\n%s" % data.getVar(func, d)
121 tmp += '\n' + func + '()' 129 tmp += '\n' + func + '()'
122 comp = utils.better_compile(tmp, func, bb.data.getVar('FILE', d, 1) ) 130 comp = utils.better_compile(tmp, func, bbfile)
123 prevdir = os.getcwd() 131 prevdir = os.getcwd()
124 g = {} # globals 132 g = {} # globals
125 g['bb'] = bb 133 g['bb'] = bb
126 g['os'] = os 134 g['os'] = os
127 g['d'] = d 135 g['d'] = d
128 utils.better_exec(comp,g,tmp, bb.data.getVar('FILE',d,1)) 136 utils.better_exec(comp, g, tmp, bbfile)
129 if os.path.exists(prevdir): 137 if os.path.exists(prevdir):
130 os.chdir(prevdir) 138 os.chdir(prevdir)
131 139
132def exec_func_shell(func, d): 140def exec_func_shell(func, d, flags):
133 """Execute a shell BB 'function' Returns true if execution was successful. 141 """Execute a shell BB 'function' Returns true if execution was successful.
134 142
135 For this, it creates a bash shell script in the tmp dectory, writes the local 143 For this, it creates a bash shell script in the tmp dectory, writes the local
@@ -141,9 +149,9 @@ def exec_func_shell(func, d):
141 """ 149 """
142 import sys 150 import sys
143 151
144 deps = data.getVarFlag(func, 'deps', d) 152 deps = flags['deps']
145 check = data.getVarFlag(func, 'check', d) 153 check = flags['check']
146 interact = data.getVarFlag(func, 'interactive', d) 154 interact = flags['interactive']
147 if check in globals(): 155 if check in globals():
148 if globals()[check](func, deps): 156 if globals()[check](func, deps):
149 return 157 return
@@ -195,7 +203,7 @@ def exec_func_shell(func, d):
195 203
196 # execute function 204 # execute function
197 prevdir = os.getcwd() 205 prevdir = os.getcwd()
198 if data.getVarFlag(func, "fakeroot", d): 206 if flags['fakeroot']:
199 maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1) 207 maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1)
200 else: 208 else:
201 maybe_fakeroot = '' 209 maybe_fakeroot = ''
@@ -255,72 +263,29 @@ def exec_task(task, d):
255 a function is that a task exists in the task digraph, and therefore 263 a function is that a task exists in the task digraph, and therefore
256 has dependencies amongst other tasks.""" 264 has dependencies amongst other tasks."""
257 265
258 # check if the task is in the graph.. 266 # Check whther this is a valid task
259 task_graph = data.getVar('_task_graph', d) 267 if not data.getVarFlag(task, 'task', d):
260 if not task_graph: 268 raise EventException("No such task", InvalidTask(task, d))
261 task_graph = bb.digraph() 269
262 data.setVar('_task_graph', task_graph, d) 270 try:
263 task_cache = data.getVar('_task_cache', d) 271 bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
264 if not task_cache: 272 old_overrides = data.getVar('OVERRIDES', d, 0)
265 task_cache = [] 273 localdata = data.createCopy(d)
266 data.setVar('_task_cache', task_cache, d) 274 data.setVar('OVERRIDES', 'task_%s:%s' % (task, old_overrides), localdata)
267 if not task_graph.hasnode(task): 275 data.update_data(localdata)
268 raise EventException("Missing node in task graph", InvalidTask(task, d)) 276 event.fire(TaskStarted(task, localdata))
269 277 exec_func(task, localdata)
270 # check whether this task needs executing.. 278 event.fire(TaskSucceeded(task, localdata))
271 if stamp_is_current(task, d): 279 except FuncFailed, reason:
272 return 1 280 bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason )
273 281 failedevent = TaskFailed(task, d)
274 # follow digraph path up, then execute our way back down 282 event.fire(failedevent)
275 def execute(graph, item): 283 raise EventException("Function failed in task: %s" % reason, failedevent)
276 if data.getVarFlag(item, 'task', d):
277 if item in task_cache:
278 return 1
279
280 if task != item:
281 # deeper than toplevel, exec w/ deps
282 exec_task(item, d)
283 return 1
284
285 try:
286 bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % item)
287 old_overrides = data.getVar('OVERRIDES', d, 0)
288 localdata = data.createCopy(d)
289 data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
290 data.update_data(localdata)
291 event.fire(TaskStarted(item, localdata))
292 exec_func(item, localdata)
293 event.fire(TaskSucceeded(item, localdata))
294 task_cache.append(item)
295 data.setVar('_task_cache', task_cache, d)
296 except FuncFailed, reason:
297 bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason )
298 failedevent = TaskFailed(item, d)
299 event.fire(failedevent)
300 raise EventException("Function failed in task: %s" % reason, failedevent)
301
302 if data.getVarFlag(task, 'dontrundeps', d):
303 execute(None, task)
304 else:
305 task_graph.walkdown(task, execute)
306 284
307 # make stamp, or cause event and raise exception 285 # make stamp, or cause event and raise exception
308 if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d): 286 if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
309 make_stamp(task, d) 287 make_stamp(task, d)
310 288
311def extract_stamp_data(d, fn):
312 """
313 Extracts stamp data from d which is either a data dictonary (fn unset)
314 or a dataCache entry (fn set).
315 """
316 if fn:
317 return (d.task_queues[fn], d.stamp[fn], d.task_deps[fn])
318 task_graph = data.getVar('_task_graph', d)
319 if not task_graph:
320 task_graph = bb.digraph()
321 data.setVar('_task_graph', task_graph, d)
322 return (task_graph, data.getVar('STAMP', d, 1), None)
323
324def extract_stamp(d, fn): 289def extract_stamp(d, fn):
325 """ 290 """
326 Extracts stamp format which is either a data dictonary (fn unset) 291 Extracts stamp format which is either a data dictonary (fn unset)
@@ -330,49 +295,6 @@ def extract_stamp(d, fn):
330 return d.stamp[fn] 295 return d.stamp[fn]
331 return data.getVar('STAMP', d, 1) 296 return data.getVar('STAMP', d, 1)
332 297
333def stamp_is_current(task, d, file_name = None, checkdeps = 1):
334 """
335 Check status of a given task's stamp.
336 Returns 0 if it is not current and needs updating.
337 (d can be a data dict or dataCache)
338 """
339
340 (task_graph, stampfn, taskdep) = extract_stamp_data(d, file_name)
341
342 if not stampfn:
343 return 0
344
345 stampfile = "%s.%s" % (stampfn, task)
346 if not os.access(stampfile, os.F_OK):
347 return 0
348
349 if checkdeps == 0:
350 return 1
351
352 import stat
353 tasktime = os.stat(stampfile)[stat.ST_MTIME]
354
355 _deps = []
356 def checkStamp(graph, task):
357 # check for existance
358 if file_name:
359 if 'nostamp' in taskdep and task in taskdep['nostamp']:
360 return 1
361 else:
362 if data.getVarFlag(task, 'nostamp', d):
363 return 1
364
365 if not stamp_is_current(task, d, file_name, 0 ):
366 return 0
367
368 depfile = "%s.%s" % (stampfn, task)
369 deptime = os.stat(depfile)[stat.ST_MTIME]
370 if deptime > tasktime:
371 return 0
372 return 1
373
374 return task_graph.walkdown(task, checkStamp)
375
376def stamp_internal(task, d, file_name): 298def stamp_internal(task, d, file_name):
377 """ 299 """
378 Internal stamp helper function 300 Internal stamp helper function
@@ -409,40 +331,39 @@ def del_stamp(task, d, file_name = None):
409 stamp_internal(task, d, file_name) 331 stamp_internal(task, d, file_name)
410 332
411def add_tasks(tasklist, d): 333def add_tasks(tasklist, d):
412 task_graph = data.getVar('_task_graph', d)
413 task_deps = data.getVar('_task_deps', d) 334 task_deps = data.getVar('_task_deps', d)
414 if not task_graph:
415 task_graph = bb.digraph()
416 if not task_deps: 335 if not task_deps:
417 task_deps = {} 336 task_deps = {}
337 if not 'tasks' in task_deps:
338 task_deps['tasks'] = []
339 if not 'parents' in task_deps:
340 task_deps['parents'] = {}
418 341
419 for task in tasklist: 342 for task in tasklist:
420 deps = tasklist[task]
421 task = data.expand(task, d) 343 task = data.expand(task, d)
422
423 data.setVarFlag(task, 'task', 1, d) 344 data.setVarFlag(task, 'task', 1, d)
424 task_graph.addnode(task, None) 345
425 for dep in deps: 346 if not task in task_deps['tasks']:
426 dep = data.expand(dep, d) 347 task_deps['tasks'].append(task)
427 if not task_graph.hasnode(dep):
428 task_graph.addnode(dep, None)
429 task_graph.addnode(task, dep)
430 348
431 flags = data.getVarFlags(task, d) 349 flags = data.getVarFlags(task, d)
432 def getTask(name): 350 def getTask(name):
351 if not name in task_deps:
352 task_deps[name] = {}
433 if name in flags: 353 if name in flags:
434 deptask = data.expand(flags[name], d) 354 deptask = data.expand(flags[name], d)
435 if not name in task_deps:
436 task_deps[name] = {}
437 task_deps[name][task] = deptask 355 task_deps[name][task] = deptask
438 getTask('depends') 356 getTask('depends')
439 getTask('deptask') 357 getTask('deptask')
440 getTask('rdeptask') 358 getTask('rdeptask')
441 getTask('recrdeptask') 359 getTask('recrdeptask')
442 getTask('nostamp') 360 getTask('nostamp')
361 task_deps['parents'][task] = []
362 for dep in flags['deps']:
363 dep = data.expand(dep, d)
364 task_deps['parents'][task].append(dep)
443 365
444 # don't assume holding a reference 366 # don't assume holding a reference
445 data.setVar('_task_graph', task_graph, d)
446 data.setVar('_task_deps', task_deps, d) 367 data.setVar('_task_deps', task_deps, d)
447 368
448def remove_task(task, kill, d): 369def remove_task(task, kill, d):
@@ -450,22 +371,5 @@ def remove_task(task, kill, d):
450 371
451 If kill is 1, also remove tasks that depend on this task.""" 372 If kill is 1, also remove tasks that depend on this task."""
452 373
453 task_graph = data.getVar('_task_graph', d)
454 if not task_graph:
455 task_graph = bb.digraph()
456 if not task_graph.hasnode(task):
457 return
458
459 data.delVarFlag(task, 'task', d) 374 data.delVarFlag(task, 'task', d)
460 ref = 1 375
461 if kill == 1:
462 ref = 2
463 task_graph.delnode(task, ref)
464 data.setVar('_task_graph', task_graph, d)
465
466def task_exists(task, d):
467 task_graph = data.getVar('_task_graph', d)
468 if not task_graph:
469 task_graph = bb.digraph()
470 data.setVar('_task_graph', task_graph, d)
471 return task_graph.hasnode(task)
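
The exec_func() hunks above replace repeated data.getVarFlag() calls with a single data.getVarFlags() lookup whose missing entries are padded with None, and the resulting dictionary is then passed through to exec_func_shell(). A standalone sketch of that normalisation step (the function name and flag values here are hypothetical):

def normalise_func_flags(flags):
    """Sketch of the flag-prefetch pattern in the new exec_func(): fetch the
    whole flag dictionary once and default any flag that was never set to
    None, so later checks need no further datastore calls."""
    for item in ('deps', 'check', 'interactive', 'python',
                 'cleandirs', 'dirs', 'lockfiles', 'fakeroot'):
        if item not in flags:
            flags[item] = None
    return flags

# A hypothetical function carrying only two flags:
flags = normalise_func_flags({'python': 1, 'dirs': '${B}'})
print(flags['python'])     # 1    -> exec_func_python() path
print(flags['fakeroot'])   # None -> no fakeroot wrapper needed
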
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 7d7e66ebd2..dad82a9b36 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -39,7 +39,7 @@ except ImportError:
39 import pickle 39 import pickle
40 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") 40 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
41 41
42__cache_version__ = "127" 42__cache_version__ = "128"
43 43
44class Cache: 44class Cache:
45 """ 45 """
@@ -50,9 +50,11 @@ class Cache:
50 50
51 self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True) 51 self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
52 self.clean = {} 52 self.clean = {}
53 self.checked = {}
53 self.depends_cache = {} 54 self.depends_cache = {}
54 self.data = None 55 self.data = None
55 self.data_fn = None 56 self.data_fn = None
57 self.cacheclean = True
56 58
57 if self.cachedir in [None, '']: 59 if self.cachedir in [None, '']:
58 self.has_cache = False 60 self.has_cache = False
@@ -67,9 +69,20 @@ class Cache:
67 except OSError: 69 except OSError:
68 bb.mkdirhier( self.cachedir ) 70 bb.mkdirhier( self.cachedir )
69 71
70 if self.has_cache and (self.mtime(self.cachefile)): 72 if not self.has_cache:
73 return
74
75 # If any of configuration.data's dependencies are newer than the
76 # cache there isn't even any point in loading it...
77 newest_mtime = 0
78 deps = bb.data.getVar("__depends", cooker.configuration.data, True)
79 for f,old_mtime in deps:
80 if old_mtime > newest_mtime:
81 newest_mtime = old_mtime
82
83 if self.mtime(self.cachefile) >= newest_mtime:
71 try: 84 try:
72 p = pickle.Unpickler( file(self.cachefile,"rb")) 85 p = pickle.Unpickler(file(self.cachefile, "rb"))
73 self.depends_cache, version_data = p.load() 86 self.depends_cache, version_data = p.load()
74 if version_data['CACHE_VER'] != __cache_version__: 87 if version_data['CACHE_VER'] != __cache_version__:
75 raise ValueError, 'Cache Version Mismatch' 88 raise ValueError, 'Cache Version Mismatch'
@@ -81,11 +94,8 @@ class Cache:
81 except (ValueError, KeyError): 94 except (ValueError, KeyError):
82 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...") 95 bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
83 self.depends_cache = {} 96 self.depends_cache = {}
84 97 else:
85 if self.depends_cache: 98 bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
86 for fn in self.depends_cache.keys():
87 self.clean[fn] = ""
88 self.cacheValidUpdate(fn)
89 99
90 def getVar(self, var, fn, exp = 0): 100 def getVar(self, var, fn, exp = 0):
91 """ 101 """
@@ -97,7 +107,6 @@ class Cache:
97 2. We're learning what data to cache - serve from data 107 2. We're learning what data to cache - serve from data
98 backend but add a copy of the data to the cache. 108 backend but add a copy of the data to the cache.
99 """ 109 """
100
101 if fn in self.clean: 110 if fn in self.clean:
102 return self.depends_cache[fn][var] 111 return self.depends_cache[fn][var]
103 112
@@ -109,6 +118,7 @@ class Cache:
109 # yet setData hasn't been called to setup the right access. Very bad. 118 # yet setData hasn't been called to setup the right access. Very bad.
110 bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn)) 119 bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
111 120
121 self.cacheclean = False
112 result = bb.data.getVar(var, self.data, exp) 122 result = bb.data.getVar(var, self.data, exp)
113 self.depends_cache[fn][var] = result 123 self.depends_cache[fn][var] = result
114 return result 124 return result
@@ -131,6 +141,8 @@ class Cache:
131 Return a complete set of data for fn. 141 Return a complete set of data for fn.
132 To do this, we need to parse the file. 142 To do this, we need to parse the file.
133 """ 143 """
144 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
145
134 bb_data, skipped = self.load_bbfile(fn, cfgData) 146 bb_data, skipped = self.load_bbfile(fn, cfgData)
135 return bb_data 147 return bb_data
136 148
@@ -142,11 +154,15 @@ class Cache:
142 to record the variables accessed. 154 to record the variables accessed.
143 Return the cache status and whether the file was skipped when parsed 155 Return the cache status and whether the file was skipped when parsed
144 """ 156 """
157 if fn not in self.checked:
158 self.cacheValidUpdate(fn)
145 if self.cacheValid(fn): 159 if self.cacheValid(fn):
146 if "SKIPPED" in self.depends_cache[fn]: 160 if "SKIPPED" in self.depends_cache[fn]:
147 return True, True 161 return True, True
148 return True, False 162 return True, False
149 163
164 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
165
150 bb_data, skipped = self.load_bbfile(fn, cfgData) 166 bb_data, skipped = self.load_bbfile(fn, cfgData)
151 self.setData(fn, bb_data) 167 self.setData(fn, bb_data)
152 return False, skipped 168 return False, skipped
@@ -172,11 +188,10 @@ class Cache:
172 if not self.has_cache: 188 if not self.has_cache:
173 return False 189 return False
174 190
175 # Check file still exists 191 self.checked[fn] = ""
176 if self.mtime(fn) == 0: 192
177 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn) 193 # Pretend we're clean so getVar works
178 self.remove(fn) 194 self.clean[fn] = ""
179 return False
180 195
181 # File isn't in depends_cache 196 # File isn't in depends_cache
182 if not fn in self.depends_cache: 197 if not fn in self.depends_cache:
@@ -184,6 +199,12 @@ class Cache:
184 self.remove(fn) 199 self.remove(fn)
185 return False 200 return False
186 201
202 # Check file still exists
203 if self.mtime(fn) == 0:
204 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
205 self.remove(fn)
206 return False
207
187 # Check the file's timestamp 208 # Check the file's timestamp
188 if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True): 209 if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
189 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn) 210 bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
@@ -195,6 +216,7 @@ class Cache:
195 for f,old_mtime in depends: 216 for f,old_mtime in depends:
196 # Check if file still exists 217 # Check if file still exists
197 if self.mtime(f) == 0: 218 if self.mtime(f) == 0:
219 self.remove(fn)
198 return False 220 return False
199 221
200 new_mtime = bb.parse.cached_mtime(f) 222 new_mtime = bb.parse.cached_mtime(f)
@@ -203,7 +225,7 @@ class Cache:
203 self.remove(fn) 225 self.remove(fn)
204 return False 226 return False
205 227
206 bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn) 228 #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
207 if not fn in self.clean: 229 if not fn in self.clean:
208 self.clean[fn] = "" 230 self.clean[fn] = ""
209 231
@@ -238,6 +260,10 @@ class Cache:
238 if not self.has_cache: 260 if not self.has_cache:
239 return 261 return
240 262
263 if self.cacheclean:
264 bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
265 return
266
241 version_data = {} 267 version_data = {}
242 version_data['CACHE_VER'] = __cache_version__ 268 version_data['CACHE_VER'] = __cache_version__
243 version_data['BITBAKE_VER'] = bb.__version__ 269 version_data['BITBAKE_VER'] = bb.__version__
@@ -264,7 +290,6 @@ class Cache:
264 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split() 290 packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
265 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split() 291 rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()
266 292
267 cacheData.task_queues[file_name] = self.getVar("_task_graph", file_name, True)
268 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True) 293 cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)
269 294
270 # build PackageName to FileName lookup table 295 # build PackageName to FileName lookup table
@@ -328,14 +353,16 @@ class Cache:
328 if not file_name in cacheData.runrecs: 353 if not file_name in cacheData.runrecs:
329 cacheData.runrecs[file_name] = {} 354 cacheData.runrecs[file_name] = {}
330 355
356 rdepends = bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")
357 rrecommends = bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")
331 for package in packages + [pn]: 358 for package in packages + [pn]:
332 if not package in cacheData.rundeps[file_name]: 359 if not package in cacheData.rundeps[file_name]:
333 cacheData.rundeps[file_name][package] = {} 360 cacheData.rundeps[file_name][package] = {}
334 if not package in cacheData.runrecs[file_name]: 361 if not package in cacheData.runrecs[file_name]:
335 cacheData.runrecs[file_name][package] = {} 362 cacheData.runrecs[file_name][package] = {}
336 363
337 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar('RDEPENDS', file_name, True) or "")) 364 add_dep(cacheData.rundeps[file_name][package], rdepends)
338 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar('RRECOMMENDS', file_name, True) or "")) 365 add_dep(cacheData.runrecs[file_name][package], rrecommends)
339 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or "")) 366 add_dep(cacheData.rundeps[file_name][package], bb.utils.explode_deps(self.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
340 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")) 367 add_dep(cacheData.runrecs[file_name][package], bb.utils.explode_deps(self.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
341 368
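
Two behavioural changes stand out in the cache.py hunks: the cache file is only unpickled when it is at least as new as everything configuration.data was built from, and a cacheclean flag avoids rewriting an unchanged cache at sync() time. A self-contained sketch of the first check (names are illustrative, not the class's API):

import os

def cache_is_loadable(cachefile, config_depends):
    """Sketch of the new pre-load test in Cache.__init__(): the pickled cache
    is only worth opening if it is not older than every file that
    configuration.data depends on.  config_depends mirrors the __depends
    list of (path, recorded_mtime) pairs used in the hunk above."""
    try:
        cache_mtime = os.stat(cachefile).st_mtime
    except OSError:
        return False                       # no cache file at all
    newest_mtime = 0
    for _path, old_mtime in config_depends:
        if old_mtime > newest_mtime:
            newest_mtime = old_mtime
    return cache_mtime >= newest_mtime
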
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 2c091b6522..38a8209760 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -97,14 +97,12 @@ class BBCooker:
97 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice)) 97 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
98 98
99 99
100 def tryBuildPackage(self, fn, item, task, the_data, build_depends): 100 def tryBuildPackage(self, fn, item, task, the_data):
101 """ 101 """
102 Build one task of a package, optionally build following task depends 102 Build one task of a package, optionally build following task depends
103 """ 103 """
104 bb.event.fire(bb.event.PkgStarted(item, the_data)) 104 bb.event.fire(bb.event.PkgStarted(item, the_data))
105 try: 105 try:
106 if not build_depends:
107 bb.data.setVarFlag('do_%s' % task, 'dontrundeps', 1, the_data)
108 if not self.configuration.dry_run: 106 if not self.configuration.dry_run:
109 bb.build.exec_task('do_%s' % task, the_data) 107 bb.build.exec_task('do_%s' % task, the_data)
110 bb.event.fire(bb.event.PkgSucceeded(item, the_data)) 108 bb.event.fire(bb.event.PkgSucceeded(item, the_data))
@@ -119,21 +117,20 @@ class BBCooker:
119 bb.event.fire(bb.event.PkgFailed(item, the_data)) 117 bb.event.fire(bb.event.PkgFailed(item, the_data))
120 raise 118 raise
121 119
122 def tryBuild( self, fn, build_depends): 120 def tryBuild(self, fn):
123 """ 121 """
124 Build a provider and its dependencies. 122 Build a provider and its dependencies.
125 build_depends is a list of previous build dependencies (not runtime) 123 build_depends is a list of previous build dependencies (not runtime)
126 If build_depends is empty, we're dealing with a runtime depends 124 If build_depends is empty, we're dealing with a runtime depends
127 """ 125 """
128
129 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data) 126 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
130 127
131 item = self.status.pkg_fn[fn] 128 item = self.status.pkg_fn[fn]
132 129
133 if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data): 130 #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data):
134 return True 131 # return True
135 132
136 return self.tryBuildPackage(fn, item, self.configuration.cmd, the_data, build_depends) 133 return self.tryBuildPackage(fn, item, self.configuration.cmd, the_data)
137 134
138 def showVersions(self): 135 def showVersions(self):
139 pkg_pn = self.status.pkg_pn 136 pkg_pn = self.status.pkg_pn
@@ -184,6 +181,8 @@ class BBCooker:
184 self.cb = None 181 self.cb = None
185 self.bb_cache = bb.cache.init(self) 182 self.bb_cache = bb.cache.init(self)
186 fn = self.matchFile(buildfile) 183 fn = self.matchFile(buildfile)
184 if not fn:
185 sys.exit(1)
187 elif len(pkgs_to_build) == 1: 186 elif len(pkgs_to_build) == 1:
188 self.updateCache() 187 self.updateCache()
189 188
@@ -220,7 +219,7 @@ class BBCooker:
220 except Exception, e: 219 except Exception, e:
221 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) 220 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
222 # emit the metadata which isnt valid shell 221 # emit the metadata which isnt valid shell
223 data.expandKeys( envdata ) 222 data.expandKeys( envdata )
224 for e in envdata.keys(): 223 for e in envdata.keys():
225 if data.getVarFlag( e, 'python', envdata ): 224 if data.getVarFlag( e, 'python', envdata ):
226 sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1))) 225 sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
@@ -273,7 +272,7 @@ class BBCooker:
273 if fnid not in seen_fnids: 272 if fnid not in seen_fnids:
274 seen_fnids.append(fnid) 273 seen_fnids.append(fnid)
275 packages = [] 274 packages = []
276 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) 275 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
277 for depend in self.status.deps[fn]: 276 for depend in self.status.deps[fn]:
278 print >> depends_file, '"%s" -> "%s"' % (pn, depend) 277 print >> depends_file, '"%s" -> "%s"' % (pn, depend)
279 rdepends = self.status.rundeps[fn] 278 rdepends = self.status.rundeps[fn]
@@ -387,19 +386,15 @@ class BBCooker:
387 try: 386 try:
388 self.configuration.data = bb.parse.handle( afile, self.configuration.data ) 387 self.configuration.data = bb.parse.handle( afile, self.configuration.data )
389 388
390 # Add the handlers we inherited by INHERIT 389 # Handle any INHERITs and inherit the base class
391 # we need to do this manually as it is not guranteed 390 inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
392 # we will pick up these classes... as we only INHERIT
393 # on .inc and .bb files but not on .conf
394 data = bb.data.createCopy( self.configuration.data )
395 inherits = ["base"] + (bb.data.getVar('INHERIT', data, True ) or "").split()
396 for inherit in inherits: 391 for inherit in inherits:
397 data = bb.parse.handle( os.path.join('classes', '%s.bbclass' % inherit ), data, True ) 392 self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
398 393
399 # FIXME: This assumes that we included at least one .inc file 394 # Nomally we only register event handlers at the end of parsing .bb files
400 for var in bb.data.keys(data): 395 # We register any handlers we've found so far here...
401 if bb.data.getVarFlag(var, 'handler', data): 396 for var in data.getVar('__BBHANDLERS', self.configuration.data) or []:
402 bb.event.register(var,bb.data.getVar(var, data)) 397 bb.event.register(var,bb.data.getVar(var, self.configuration.data))
403 398
404 bb.fetch.fetcher_init(self.configuration.data) 399 bb.fetch.fetcher_init(self.configuration.data)
405 400
@@ -463,30 +458,62 @@ class BBCooker:
463 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches))) 458 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
464 for f in matches: 459 for f in matches:
465 bb.msg.error(bb.msg.domain.Parsing, " %s" % f) 460 bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
466 sys.exit(1) 461 return False
467 return matches[0] 462 return matches[0]
468 463
469 def buildFile(self, buildfile): 464 def buildFile(self, buildfile):
470 """ 465 """
471 Build the file matching regexp buildfile 466 Build the file matching regexp buildfile
472 """ 467 """
473 468
474 bf = self.matchFile(buildfile) 469 # Make sure our target is a fully qualified filename
470 fn = self.matchFile(buildfile)
471 if not fn:
472 return False
475 473
476 bbfile_data = bb.parse.handle(bf, self.configuration.data) 474 # Load data into the cache for fn
475 self.bb_cache = bb.cache.init(self)
476 self.bb_cache.loadData(fn, self.configuration.data)
477
478 # Parse the loaded cache data
479 self.status = bb.cache.CacheData()
480 self.bb_cache.handle_data(fn, self.status)
481
482 # Tweak some variables
483 item = self.bb_cache.getVar('PN', fn, True)
484 self.status.ignored_dependencies = Set()
485 self.status.bbfile_priority[fn] = 1
486
487 # Remove external dependencies
488 self.status.task_deps[fn]['depends'] = {}
489 self.status.deps[fn] = []
490 self.status.rundeps[fn] = []
491 self.status.runrecs[fn] = []
477 492
478 # Remove stamp for target if force mode active 493 # Remove stamp for target if force mode active
479 if self.configuration.force: 494 if self.configuration.force:
480 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (self.configuration.cmd, bf)) 495 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (self.configuration.cmd, fn))
481 bb.build.del_stamp('do_%s' % self.configuration.cmd, bbfile_data) 496 bb.build.del_stamp('do_%s' % self.configuration.cmd, bbfile_data)
482 497
483 item = bb.data.getVar('PN', bbfile_data, 1) 498 # Setup taskdata structure
484 try: 499 taskdata = bb.taskdata.TaskData(self.configuration.abort)
485 self.tryBuildPackage(bf, item, self.configuration.cmd, bbfile_data, True) 500 taskdata.add_provider(self.configuration.data, self.status, item)
486 except bb.build.EventException:
487 bb.msg.error(bb.msg.domain.Build, "Build of '%s' failed" % item )
488 501
489 sys.exit(0) 502 buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
503 bb.event.fire(bb.event.BuildStarted(buildname, [item], self.configuration.event_data))
504
505 # Execute the runqueue
506 runlist = [[item, "do_%s" % self.configuration.cmd]]
507 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
508 rq.prepare_runqueue()
509 try:
510 failures = rq.execute_runqueue()
511 except runqueue.TaskFailure, fnids:
512 for fnid in fnids:
513 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
514 return False
515 bb.event.fire(bb.event.BuildCompleted(buildname, [item], self.configuration.event_data, failures))
516 return True
490 517
491 def buildTargets(self, targets): 518 def buildTargets(self, targets):
492 """ 519 """
@@ -568,7 +595,9 @@ class BBCooker:
568 self.interactiveMode() 595 self.interactiveMode()
569 596
570 if self.configuration.buildfile is not None: 597 if self.configuration.buildfile is not None:
571 return self.buildFile(self.configuration.buildfile) 598 if not self.buildFile(self.configuration.buildfile):
599 sys.exit(1)
600 sys.exit(0)
572 601
573 # initialise the parsing status now we know we will need deps 602 # initialise the parsing status now we know we will need deps
574 self.updateCache() 603 self.updateCache()
@@ -676,7 +705,7 @@ class BBCooker:
676 for i in xrange( len( filelist ) ): 705 for i in xrange( len( filelist ) ):
677 f = filelist[i] 706 f = filelist[i]
678 707
679 bb.msg.debug(1, bb.msg.domain.Collection, "parsing %s" % f) 708 #bb.msg.debug(1, bb.msg.domain.Collection, "parsing %s" % f)
680 709
681 # read a file's metadata 710 # read a file's metadata
682 try: 711 try:
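
The cooker.py changes route -b builds through the cache, TaskData and RunQueue machinery, and also simplify configuration parsing: classes named in INHERIT are parsed straight into configuration.data and event handlers are taken from __BBHANDLERS rather than found by scanning variable flags. A compact sketch of that configuration tail, with every bb.* call replaced by a hypothetical stand-in:

def finish_configuration(cfg_data, getvar, parse_class, get_handlers, register):
    """Sketch of the reworked tail of configuration parsing: inherit the base
    class plus anything listed in INHERIT directly into the main configuration
    data, then register whatever handlers __BBHANDLERS has accumulated so far.
    All callables here are stand-ins for bb.data / bb.parse / bb.event APIs."""
    inherits = ["base"] + (getvar("INHERIT", cfg_data) or "").split()
    for inherit in inherits:
        cfg_data = parse_class("classes/%s.bbclass" % inherit, cfg_data)
    for var in get_handlers(cfg_data) or []:
        register(var, getvar(var, cfg_data))
    return cfg_data
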
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index e879343f5d..b3a51b0edf 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -232,10 +232,10 @@ class DataSmart:
232 flags = {} 232 flags = {}
233 233
234 if local_var: 234 if local_var:
235 for i in self.dict[var].keys(): 235 for i in local_var.keys():
236 if i == "content": 236 if i == "content":
237 continue 237 continue
238 flags[i] = self.dict[var][i] 238 flags[i] = local_var[i]
239 239
240 if len(flags) == 0: 240 if len(flags) == 0:
241 return None 241 return None
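
The data_smart.py fix matters because DataSmart stores are copy-on-write: a variable can be visible through a copy without appearing in that copy's own self.dict, so indexing self.dict[var] can raise KeyError even though the looked-up local_var is perfectly usable. A toy model of that situation (not the real DataSmart API):

class ToyCOW:
    """Toy copy-on-write store: lookups fall back to the parent copy."""
    def __init__(self, parent=None):
        self.dict = {}
        self.parent = parent

    def find(self, var):
        if var in self.dict:
            return self.dict[var]
        return self.parent.find(var) if self.parent else None

base = ToyCOW()
base.dict['A'] = {'content': '1', 'python': True}
child = ToyCOW(parent=base)      # copy-on-write child, nothing written yet

local_var = child.find('A')      # found via the parent
print('A' in child.dict)         # False -> child.dict['A'] would KeyError
print(local_var['python'])       # True  -> the fixed getVarFlags() path
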
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 7148a2b7d6..c0a59e6120 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -127,6 +127,23 @@ def getName(e):
127class ConfigParsed(Event): 127class ConfigParsed(Event):
128 """Configuration Parsing Complete""" 128 """Configuration Parsing Complete"""
129 129
130class StampUpdate(Event):
131 """Trigger for any adjustment of the stamp files to happen"""
132
133 def __init__(self, targets, stampfns, d):
134 self._targets = targets
135 self._stampfns = stampfns
136 Event.__init__(self, d)
137
138 def getStampPrefix(self):
139 return self._stampfns
140
141 def getTargets(self):
142 return self._targets
143
144 stampPrefix = property(getStampPrefix)
145 targets = property(getTargets)
146
130class PkgBase(Event): 147class PkgBase(Event):
131 """Base class for package events""" 148 """Base class for package events"""
132 149
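
The new StampUpdate event is fired by the runqueue (see the runqueue.py hunk below) with the list of (recipe file, task name) target pairs and the per-recipe stamp prefixes, so external code gets a chance to adjust stamp files before execution begins. A hypothetical handler consuming it might look like:

def stamp_update_handler(e):
    """Hypothetical subscriber for bb.event.StampUpdate.  e.targets is a list
    of (recipe filename, task name) pairs and e.stampPrefix maps a recipe
    filename to its stamp prefix, matching how the runqueue fires the event."""
    for fn, taskname in e.targets:
        stampfile = "%s.%s" % (e.stampPrefix[fn], taskname)
        print("StampUpdate: would reconsider", stampfile)
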
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 700efcb4ac..4919b9d473 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -139,13 +139,21 @@ def go(d):
139 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): 139 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
140 # File already present along with md5 stamp file 140 # File already present along with md5 stamp file
141 # Touch md5 file to show activity 141 # Touch md5 file to show activity
142 os.utime(ud.md5, None) 142 try:
143 os.utime(ud.md5, None)
144 except:
145 # Errors aren't fatal here
146 pass
143 continue 147 continue
144 lf = bb.utils.lockfile(ud.lockfile) 148 lf = bb.utils.lockfile(ud.lockfile)
145 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): 149 if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
146 # If someone else fetched this before we got the lock, 150 # If someone else fetched this before we got the lock,
147 # notice and don't try again 151 # notice and don't try again
148 os.utime(ud.md5, None) 152 try:
153 os.utime(ud.md5, None)
154 except:
155 # Errors aren't fatal here
156 pass
149 bb.utils.unlockfile(lf) 157 bb.utils.unlockfile(lf)
150 continue 158 continue
151 m.go(u, ud, d) 159 m.go(u, ud, d)
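
Both os.utime() calls in the fetcher are now wrapped so that a failure to refresh the md5 stamp (for instance on a read-only download directory, an assumed scenario) never aborts the fetch. The same best-effort idiom as a small helper:

import os

def touch_best_effort(path):
    """Best-effort touch mirroring the hedged os.utime() calls above;
    errors are deliberately not fatal."""
    try:
        os.utime(path, None)
    except OSError:
        pass
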
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 2a30e5895a..d7bf6d4f37 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -95,6 +95,10 @@ def handle(fn, d, include = 0):
95 if ext == ".bbclass": 95 if ext == ".bbclass":
96 __classname__ = root 96 __classname__ = root
97 classes.append(__classname__) 97 classes.append(__classname__)
98 __inherit_cache = data.getVar('__inherit_cache', d) or []
99 if not fn in __inherit_cache:
100 __inherit_cache.append(fn)
101 data.setVar('__inherit_cache', __inherit_cache, d)
98 102
99 if include != 0: 103 if include != 0:
100 oldfile = data.getVar('FILE', d) 104 oldfile = data.getVar('FILE', d)
@@ -126,10 +130,6 @@ def handle(fn, d, include = 0):
126 130
127 if ext != ".bbclass": 131 if ext != ".bbclass":
128 data.setVar('FILE', fn, d) 132 data.setVar('FILE', fn, d)
129 i = (data.getVar("INHERIT", d, 1) or "").split()
130 if not "base" in i and __classname__ != "base":
131 i[0:0] = ["base"]
132 inherit(i, d)
133 133
134 lineno = 0 134 lineno = 0
135 while 1: 135 while 1:
@@ -171,33 +171,12 @@ def handle(fn, d, include = 0):
171 all_handlers = {} 171 all_handlers = {}
172 for var in data.getVar('__BBHANDLERS', d) or []: 172 for var in data.getVar('__BBHANDLERS', d) or []:
173 # try to add the handler 173 # try to add the handler
174 # if we added it remember the choiche
175 handler = data.getVar(var,d) 174 handler = data.getVar(var,d)
176 if bb.event.register(var,handler) == bb.event.Registered: 175 bb.event.register(var, handler)
177 all_handlers[var] = handler
178
179 tasklist = {}
180 for var in data.getVar('__BBTASKS', d) or []:
181 if var not in tasklist:
182 tasklist[var] = []
183 deps = data.getVarFlag(var, 'deps', d) or []
184 for p in deps:
185 if p not in tasklist[var]:
186 tasklist[var].append(p)
187
188 postdeps = data.getVarFlag(var, 'postdeps', d) or []
189 for p in postdeps:
190 if p not in tasklist:
191 tasklist[p] = []
192 if var not in tasklist[p]:
193 tasklist[p].append(var)
194 176
177 tasklist = data.getVar('__BBTASKS', d) or []
195 bb.build.add_tasks(tasklist, d) 178 bb.build.add_tasks(tasklist, d)
196 179
197 # now add the handlers
198 if not len(all_handlers) == 0:
199 data.setVar('__all_handlers__', all_handlers, d)
200
201 bbpath.pop(0) 180 bbpath.pop(0)
202 if oldfile: 181 if oldfile:
203 bb.data.setVar("FILE", oldfile, d) 182 bb.data.setVar("FILE", oldfile, d)
@@ -342,15 +321,23 @@ def feeder(lineno, s, fn, root, d):
342 data.setVarFlag(var, "task", 1, d) 321 data.setVarFlag(var, "task", 1, d)
343 322
344 bbtasks = data.getVar('__BBTASKS', d) or [] 323 bbtasks = data.getVar('__BBTASKS', d) or []
345 bbtasks.append(var) 324 if not var in bbtasks:
325 bbtasks.append(var)
346 data.setVar('__BBTASKS', bbtasks, d) 326 data.setVar('__BBTASKS', bbtasks, d)
347 327
328 existing = data.getVarFlag(var, "deps", d) or []
348 if after is not None: 329 if after is not None:
349# set up deps for function 330 # set up deps for function
350 data.setVarFlag(var, "deps", after.split(), d) 331 for entry in after.split():
332 if entry not in existing:
333 existing.append(entry)
334 data.setVarFlag(var, "deps", existing, d)
351 if before is not None: 335 if before is not None:
352# set up things that depend on this func 336 # set up things that depend on this func
353 data.setVarFlag(var, "postdeps", before.split(), d) 337 for entry in before.split():
338 existing = data.getVarFlag(entry, "deps", d) or []
339 if var not in existing:
340 data.setVarFlag(entry, "deps", [var] + existing, d)
354 return 341 return
355 342
356 m = __addhandler_regexp__.match(s) 343 m = __addhandler_regexp__.match(s)
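
With the postdeps flag gone, feeder() now folds both directions of an addtask line into plain deps flags: after extends the new task's own deps, while before pushes the new task onto each later task's deps. For a hypothetical "addtask do_deploy after do_install before do_build", the recorded flags would end up roughly as:

# Hypothetical resulting flag state (only the relevant flags shown):
recorded_flags = {
    'do_deploy': {'task': 1, 'deps': ['do_install']},
    'do_build':  {'deps': ['do_deploy']},   # prepended to any deps do_build already had
}
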
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 9d72d92fac..2765343a3e 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -26,6 +26,7 @@ from bb import msg, data, event, mkdirhier, utils
26from sets import Set 26from sets import Set
27import bb, os, sys 27import bb, os, sys
28import signal 28import signal
29import stat
29 30
30class TaskFailure(Exception): 31class TaskFailure(Exception):
31 """Exception raised when a task in a runqueue fails""" 32 """Exception raised when a task in a runqueue fails"""
@@ -45,11 +46,11 @@ class RunQueueStats:
45 def taskFailed(self): 46 def taskFailed(self):
46 self.failed = self.failed + 1 47 self.failed = self.failed + 1
47 48
48 def taskCompleted(self): 49 def taskCompleted(self, number = 1):
49 self.completed = self.completed + 1 50 self.completed = self.completed + number
50 51
51 def taskSkipped(self): 52 def taskSkipped(self, number = 1):
52 self.skipped = self.skipped + 1 53 self.skipped = self.skipped + number
53 54
54class RunQueueScheduler: 55class RunQueueScheduler:
55 """ 56 """
@@ -144,8 +145,11 @@ class RunQueue:
144 self.taskData = taskData 145 self.taskData = taskData
145 self.targets = targets 146 self.targets = targets
146 147
147 self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData) or 1) 148 self.cfgdata = cfgData
148 self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData) or "").split() 149 self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData, 1) or 1)
150 self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
151 self.scheduler = bb.data.getVar("BB_SCHEDULER", cfgData, 1) or "speed"
152 self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, 1) or "perfile"
149 153
150 def reset_runqueue(self): 154 def reset_runqueue(self):
151 155
@@ -512,6 +516,7 @@ class RunQueue:
512 for depend in depends: 516 for depend in depends:
513 mark_active(depend, depth+1) 517 mark_active(depend, depth+1)
514 518
519 self.target_pairs = []
515 for target in self.targets: 520 for target in self.targets:
516 targetid = taskData.getbuild_id(target[0]) 521 targetid = taskData.getbuild_id(target[0])
517 522
@@ -522,10 +527,11 @@ class RunQueue:
522 continue 527 continue
523 528
524 fnid = taskData.build_targets[targetid][0] 529 fnid = taskData.build_targets[targetid][0]
530 fn = taskData.fn_index[fnid]
531 self.target_pairs.append((fn, target[1]))
525 532
526 # Remove stamps for targets if force mode active 533 # Remove stamps for targets if force mode active
527 if self.cooker.configuration.force: 534 if self.cooker.configuration.force:
528 fn = taskData.fn_index[fnid]
529 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn)) 535 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn))
530 bb.build.del_stamp(target[1], self.dataCache, fn) 536 bb.build.del_stamp(target[1], self.dataCache, fn)
531 537
@@ -608,10 +614,11 @@ class RunQueue:
608 self.runq_weight = self.calculate_task_weights(endpoints) 614 self.runq_weight = self.calculate_task_weights(endpoints)
609 615
610 # Decide what order to execute the tasks in, pick a scheduler 616 # Decide what order to execute the tasks in, pick a scheduler
611 # FIXME - Allow user selection
612 #self.sched = RunQueueScheduler(self) 617 #self.sched = RunQueueScheduler(self)
613 self.sched = RunQueueSchedulerSpeed(self) 618 if self.scheduler == "completion":
614 #self.sched = RunQueueSchedulerCompletion(self) 619 self.sched = RunQueueSchedulerCompletion(self)
620 else:
621 self.sched = RunQueueSchedulerSpeed(self)
615 622
616 # Sanity Check - Check for multiple tasks building the same provider 623 # Sanity Check - Check for multiple tasks building the same provider
617 prov_list = {} 624 prov_list = {}
@@ -636,6 +643,93 @@ class RunQueue:
636 643
637 #self.dump_data(taskData) 644 #self.dump_data(taskData)
638 645
646 def check_stamps(self):
647 unchecked = {}
648 current = []
649 notcurrent = []
650 buildable = []
651
652 if self.stamppolicy == "perfile":
653 fulldeptree = False
654 else:
655 fulldeptree = True
656
657 for task in range(len(self.runq_fnid)):
658 unchecked[task] = ""
659 if len(self.runq_depends[task]) == 0:
660 buildable.append(task)
661
662 for task in range(len(self.runq_fnid)):
663 if task not in unchecked:
664 continue
665 fn = self.taskData.fn_index[self.runq_fnid[task]]
666 taskname = self.runq_task[task]
667 stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
668 # If the stamp is missing its not current
669 if not os.access(stampfile, os.F_OK):
670 del unchecked[task]
671 notcurrent.append(task)
672 continue
673 # If its a 'nostamp' task, it's not current
674 taskdep = self.dataCache.task_deps[fn]
675 if 'nostamp' in taskdep and task in taskdep['nostamp']:
676 del unchecked[task]
677 notcurrent.append(task)
678 continue
679
680 while (len(buildable) > 0):
681 nextbuildable = []
682 for task in buildable:
683 if task in unchecked:
684 fn = self.taskData.fn_index[self.runq_fnid[task]]
685 taskname = self.runq_task[task]
686 stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
687 iscurrent = True
688
689 t1 = os.stat(stampfile)[stat.ST_MTIME]
690 for dep in self.runq_depends[task]:
691 if iscurrent:
692 fn2 = self.taskData.fn_index[self.runq_fnid[dep]]
693 taskname2 = self.runq_task[dep]
694 stampfile2 = "%s.%s" % (self.dataCache.stamp[fn2], taskname2)
695 if fulldeptree or fn == fn2:
696 if dep in notcurrent:
697 iscurrent = False
698 else:
699 t2 = os.stat(stampfile2)[stat.ST_MTIME]
700 if t1 < t2:
701 iscurrent = False
702 del unchecked[task]
703 if iscurrent:
704 current.append(task)
705 else:
706 notcurrent.append(task)
707
708 for revdep in self.runq_revdeps[task]:
709 alldeps = 1
710 for dep in self.runq_depends[revdep]:
711 if dep in unchecked:
712 alldeps = 0
713 if alldeps == 1:
714 if revdep in unchecked:
715 nextbuildable.append(revdep)
716
717 buildable = nextbuildable
718
719 #for task in range(len(self.runq_fnid)):
720 # fn = self.taskData.fn_index[self.runq_fnid[task]]
721 # taskname = self.runq_task[task]
722 # print "%s %s.%s" % (task, taskname, fn)
723
724 #print "Unchecked: %s" % unchecked
725 #print "Current: %s" % current
726 #print "Not current: %s" % notcurrent
727
728 if len(unchecked) > 0:
729 bb.fatal("check_stamps fatal internal error")
730 return current
731
732
639 def execute_runqueue(self): 733 def execute_runqueue(self):
640 """ 734 """
641 Run the tasks in a queue prepared by prepare_runqueue 735 Run the tasks in a queue prepared by prepare_runqueue
@@ -721,18 +815,13 @@ class RunQueue:
721 def sigint_handler(signum, frame): 815 def sigint_handler(signum, frame):
722 raise KeyboardInterrupt 816 raise KeyboardInterrupt
723 817
724 # RP - this code allows tasks to run out of the correct order - disabled, FIXME 818 event.fire(bb.event.StampUpdate(self.target_pairs, self.dataCache.stamp, self.cfgdata))
725 # Find any tasks with current stamps and remove them from the queue 819
726 #for task1 in range(len(self.runq_fnid)): 820 # Find out which tasks have current stamps which we can skip when the
727 # task = self.prio_map[task1] 821 # time comes
728 # fn = self.taskData.fn_index[self.runq_fnid[task]] 822 currentstamps = self.check_stamps()
729 # taskname = self.runq_task[task] 823 self.stats.taskSkipped(len(currentstamps))
730 # if bb.build.stamp_is_current(taskname, self.dataCache, fn): 824 self.stats.taskCompleted(len(currentstamps))
731 # bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task)))
732 # self.runq_running[task] = 1
733 # self.task_complete(task)
734 # self.stats.taskCompleted()
735 # self.stats.taskSkipped()
736 825
737 while True: 826 while True:
738 task = self.sched.next() 827 task = self.sched.next()
@@ -740,12 +829,13 @@ class RunQueue:
740 fn = self.taskData.fn_index[self.runq_fnid[task]] 829 fn = self.taskData.fn_index[self.runq_fnid[task]]
741 830
742 taskname = self.runq_task[task] 831 taskname = self.runq_task[task]
743 if bb.build.stamp_is_current(taskname, self.dataCache, fn): 832 if task in currentstamps:
833 #if bb.build.stamp_is_current(taskname, self.dataCache, fn):
744 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task))) 834 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task)))
745 self.runq_running[task] = 1 835 self.runq_running[task] = 1
746 self.task_complete(task) 836 self.task_complete(task)
747 self.stats.taskCompleted() 837 #self.stats.taskCompleted()
748 self.stats.taskSkipped() 838 #self.stats.taskSkipped()
749 continue 839 continue
750 840
751 bb.msg.note(1, bb.msg.domain.RunQueue, "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.active_builds + 1, len(self.runq_fnid), task, self.get_user_idstring(task))) 841 bb.msg.note(1, bb.msg.domain.RunQueue, "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.active_builds + 1, len(self.runq_fnid), task, self.get_user_idstring(task)))
@@ -764,7 +854,7 @@ class RunQueue:
764 os.dup2(newsi, sys.stdin.fileno()) 854 os.dup2(newsi, sys.stdin.fileno())
765 self.cooker.configuration.cmd = taskname[3:] 855 self.cooker.configuration.cmd = taskname[3:]
766 try: 856 try:
767 self.cooker.tryBuild(fn, False) 857 self.cooker.tryBuild(fn)
768 except bb.build.EventException: 858 except bb.build.EventException:
769 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") 859 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
770 sys.exit(1) 860 sys.exit(1)
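
check_stamps() walks the run queue from its leaf tasks upwards and marks a task current only if its stamp file exists, it is not a nostamp task, and no relevant dependency (all dependencies under a full BB_STAMP_POLICY, same-recipe ones under the default perfile policy) is newer or already out of date. The per-dependency timestamp test, reduced to a standalone sketch:

import os, stat

def dep_allows_current(stampfile, dep_stampfile):
    """Sketch of the innermost check_stamps() comparison: the task stays
    current only if its stamp is not older than the dependency's stamp."""
    t1 = os.stat(stampfile)[stat.ST_MTIME]
    t2 = os.stat(dep_stampfile)[stat.ST_MTIME]
    return t1 >= t2
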
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
index 745091fb7d..feba3f2b44 100644
--- a/bitbake/lib/bb/shell.py
+++ b/bitbake/lib/bb/shell.py
@@ -243,27 +243,13 @@ class BitBakeShellCommands:
243 oldcmd = cooker.configuration.cmd 243 oldcmd = cooker.configuration.cmd
244 cooker.configuration.cmd = cmd 244 cooker.configuration.cmd = cmd
245 245
246 thisdata = data.createCopy(cooker.configuration.data)
247 data.update_data(thisdata)
248 data.expandKeys(thisdata)
249
250 try: 246 try:
251 bbfile_data = parse.handle( bf, thisdata ) 247 cooker.buildFile(bf)
252 except parse.ParseError: 248 except parse.ParseError:
253 print "ERROR: Unable to open or parse '%s'" % bf 249 print "ERROR: Unable to open or parse '%s'" % bf
254 else: 250 except build.EventException, e:
255 # Remove stamp for target if force mode active 251 print "ERROR: Couldn't build '%s'" % name
256 if cooker.configuration.force: 252 last_exception = e
257 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (cmd, bf))
258 bb.build.del_stamp('do_%s' % cmd, bbfile_data)
259
260 item = data.getVar('PN', bbfile_data, 1)
261 data.setVar( "_task_cache", [], bbfile_data ) # force
262 try:
263 cooker.tryBuildPackage( os.path.abspath( bf ), item, cmd, bbfile_data, True )
264 except build.EventException, e:
265 print "ERROR: Couldn't build '%s'" % name
266 last_exception = e
267 253
268 cooker.configuration.cmd = oldcmd 254 cooker.configuration.cmd = oldcmd
269 fileBuild.usage = "<bbfile>" 255 fileBuild.usage = "<bbfile>"
@@ -586,6 +572,7 @@ SRC_URI = ""
586 572
587def completeFilePath( bbfile ): 573def completeFilePath( bbfile ):
588 """Get the complete bbfile path""" 574 """Get the complete bbfile path"""
575 if not cooker.status: return bbfile
589 if not cooker.status.pkg_fn: return bbfile 576 if not cooker.status.pkg_fn: return bbfile
590 for key in cooker.status.pkg_fn.keys(): 577 for key in cooker.status.pkg_fn.keys():
591 if key.endswith( bbfile ): 578 if key.endswith( bbfile ):
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 3dac6c26af..4a79e7a56d 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -124,7 +124,6 @@ class TaskData:
124 Add tasks for a given fn to the database 124 Add tasks for a given fn to the database
125 """ 125 """
126 126
127 task_graph = dataCache.task_queues[fn]
128 task_deps = dataCache.task_deps[fn] 127 task_deps = dataCache.task_deps[fn]
129 128
130 fnid = self.getfn_id(fn) 129 fnid = self.getfn_id(fn)
@@ -136,11 +135,11 @@ class TaskData:
136 if fnid in self.tasks_fnid: 135 if fnid in self.tasks_fnid:
137 return 136 return
138 137
139 for task in task_graph.allnodes(): 138 for task in task_deps['tasks']:
140 139
141 # Work out task dependencies 140 # Work out task dependencies
142 parentids = [] 141 parentids = []
143 for dep in task_graph.getparents(task): 142 for dep in task_deps['parents'][task]:
144 parentid = self.gettask_id(fn, dep) 143 parentid = self.gettask_id(fn, dep)
145 parentids.append(parentid) 144 parentids.append(parentid)
146 taskid = self.gettask_id(fn, task) 145 taskid = self.gettask_id(fn, task)
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index a2a5ff6cfd..9702c8c204 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -85,11 +85,11 @@ def explode_deps(s):
85 for i in l: 85 for i in l:
86 if i[0] == '(': 86 if i[0] == '(':
87 flag = True 87 flag = True
88 j = [] 88 #j = []
89 if flag: 89 if not flag:
90 j.append(i)
91 else:
92 r.append(i) 90 r.append(i)
91 #else:
92 # j.append(i)
93 if flag and i.endswith(')'): 93 if flag and i.endswith(')'):
94 flag = False 94 flag = False
95 # Ignore version 95 # Ignore version
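
After the utils.py change, parenthesised version constraints are skipped outright instead of being collected into the now unused j list. A reduced standalone version of the resulting behaviour, with a worked example (input string is hypothetical):

def explode_deps(s):
    """Reduced sketch of the modified bb.utils.explode_deps(): tokens inside
    a ( ... ) version constraint are dropped, everything else is kept."""
    r = []
    flag = False
    for i in s.split():
        if i.startswith('('):
            flag = True
        if not flag:
            r.append(i)
        if flag and i.endswith(')'):
            flag = False
    return r

print(explode_deps("glibc (>= 2.5) update-rc.d virtual/kernel"))
# -> ['glibc', 'update-rc.d', 'virtual/kernel']
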