Diffstat (limited to 'bitbake/lib')
-rw-r--r--  bitbake/lib/bb/__init__.py | 16
-rw-r--r--  bitbake/lib/bb/build.py | 52
-rw-r--r--  bitbake/lib/bb/cooker.py | 13
-rw-r--r--  bitbake/lib/bb/data.py | 7
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py | 39
-rw-r--r--  bitbake/lib/bb/fetch/svn.py | 9
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 18
-rw-r--r--  bitbake/lib/bb/runqueue.py | 30
-rw-r--r--  bitbake/lib/bb/taskdata.py | 41
9 files changed, 146 insertions, 79 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 585eec8875..77b1255c77 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.8.7"
+__version__ = "1.8.9"
 
 __all__ = [
 
@@ -1124,7 +1124,12 @@ class digraph:
 
     def allnodes(self):
         "returns all nodes in the dictionary"
-        return self.dict.keys()
+        keys = self.dict.keys()
+        ret = []
+        for key in keys:
+            ret.append(key)
+        ret.sort()
+        return ret
 
     def firstzero(self):
         "returns first node with zero references, or NULL if no such node exists"
@@ -1168,7 +1173,12 @@ class digraph:
     def getparents(self, item):
         if not self.hasnode(item):
             return []
-        return self.dict[item][1]
+        parents = self.dict[item][1]
+        ret = []
+        for parent in parents:
+            ret.append(parent)
+        ret.sort()
+        return ret
 
     def getchildren(self, item):
         if not self.hasnode(item):
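The digraph hunks above make allnodes() and getparents() return sorted copies instead of handing back the live dictionary views, so callers see a deterministic order. A minimal standalone sketch of the same idea, using a hypothetical adjacency dict rather than BitBake's digraph class:

# Hypothetical adjacency dict: node -> list of parent nodes.
node_graph = {
    "do_compile": ["do_configure"],
    "do_configure": ["do_patch"],
    "do_patch": [],
}

def allnodes(graph):
    # Sorted copy: deterministic iteration order, and callers cannot
    # mutate the underlying dict through the returned value.
    return sorted(graph.keys())

def getparents(graph, node):
    if node not in graph:
        return []
    return sorted(graph[node])

print(allnodes(node_graph))                  # ['do_compile', 'do_configure', 'do_patch']
print(getparents(node_graph, "do_compile"))  # ['do_configure']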
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index bcbc55eea5..e9a6fc8c61 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -397,35 +397,41 @@ def del_stamp(task, d, file_name = None):
     """
     stamp_internal(task, d, file_name)
 
-def add_task(task, deps, d):
+def add_tasks(tasklist, d):
     task_graph = data.getVar('_task_graph', d)
+    task_deps = data.getVar('_task_deps', d)
     if not task_graph:
         task_graph = bb.digraph()
-    data.setVarFlag(task, 'task', 1, d)
-    task_graph.addnode(task, None)
-    for dep in deps:
-        if not task_graph.hasnode(dep):
-            task_graph.addnode(dep, None)
-        task_graph.addnode(task, dep)
-    # don't assume holding a reference
-    data.setVar('_task_graph', task_graph, d)
-
-    task_deps = data.getVar('_task_deps', d)
     if not task_deps:
         task_deps = {}
-    def getTask(name):
-        deptask = data.getVarFlag(task, name, d)
-        if deptask:
-            deptask = data.expand(deptask, d)
-            if not name in task_deps:
-                task_deps[name] = {}
-            task_deps[name][task] = deptask
-    getTask('depends')
-    getTask('deptask')
-    getTask('rdeptask')
-    getTask('recrdeptask')
-    getTask('nostamp')
 
+    for task in tasklist:
+        deps = tasklist[task]
+        task = data.expand(task, d)
+
+        data.setVarFlag(task, 'task', 1, d)
+        task_graph.addnode(task, None)
+        for dep in deps:
+            dep = data.expand(dep, d)
+            if not task_graph.hasnode(dep):
+                task_graph.addnode(dep, None)
+            task_graph.addnode(task, dep)
+
+        flags = data.getVarFlags(task, d)
+        def getTask(name):
+            if name in flags:
+                deptask = data.expand(flags[name], d)
+                if not name in task_deps:
+                    task_deps[name] = {}
+                task_deps[name][task] = deptask
+        getTask('depends')
+        getTask('deptask')
+        getTask('rdeptask')
+        getTask('recrdeptask')
+        getTask('nostamp')
+
+    # don't assume holding a reference
+    data.setVar('_task_graph', task_graph, d)
     data.setVar('_task_deps', task_deps, d)
 
 def remove_task(task, kill, d):
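The reworked add_tasks() above consumes one mapping of task name to dependency list and records the per-task flags in the same pass, instead of being called once per task. A rough sketch of that shape with plain dicts standing in for BitBake's data store (the names below are illustrative, not the real API):

# Illustrative input: task name -> list of tasks it depends on.
tasklist = {
    "do_compile": ["do_configure"],
    "do_configure": ["do_patch"],
    "do_patch": [],
}

# Illustrative per-task flags, standing in for data.getVarFlags(task, d).
flags = {
    "do_compile": {"deptask": "do_populate_staging"},
    "do_configure": {},
    "do_patch": {},
}

edges = []        # (task, dep) pairs, i.e. the digraph edges
task_deps = {}    # flag name -> {task: value}, like the _task_deps store

for task, deps in tasklist.items():
    for dep in deps:
        edges.append((task, dep))
    for name in ("depends", "deptask", "rdeptask", "recrdeptask", "nostamp"):
        if name in flags[task]:
            task_deps.setdefault(name, {})[task] = flags[task][name]

print(edges)
print(task_deps)   # {'deptask': {'do_compile': 'do_populate_staging'}}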
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 955fbb434c..0eda9eed99 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -73,6 +73,19 @@ class BBCooker:
         self.configuration.event_data = bb.data.createCopy(self.configuration.data)
         bb.data.update_data(self.configuration.event_data)
 
+        #
+        # TOSTOP must not be set or our children will hang when they output
+        #
+        fd = sys.stdout.fileno()
+        if os.isatty(fd):
+            import termios
+            tcattr = termios.tcgetattr(fd)
+            if tcattr[3] & termios.TOSTOP:
+                bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
+                tcattr[3] = tcattr[3] & ~termios.TOSTOP
+                termios.tcsetattr(fd, termios.TCSANOW, tcattr)
+
+
     def tryBuildPackage(self, fn, item, task, the_data, build_depends):
         """
         Build one task of a package, optionally build following task depends
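The cooker hunk clears the terminal's TOSTOP flag so that background children are not stopped with SIGTTOU when they write output. A self-contained sketch of the same check (POSIX terminals only; it only clears the bit if it is actually set):

import os
import sys
import termios

fd = sys.stdout.fileno()
if os.isatty(fd):
    attrs = termios.tcgetattr(fd)
    # Index 3 is the local-mode word; TOSTOP stops background writers with SIGTTOU.
    if attrs[3] & termios.TOSTOP:
        print("TOSTOP is set, clearing it so child output does not suspend the children")
        attrs[3] &= ~termios.TOSTOP
        termios.tcsetattr(fd, termios.TCSANOW, attrs)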
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 21cdde04a8..7ad1acad1c 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -282,6 +282,7 @@ def expandKeys(alterdata, readdata = None):
     if readdata == None:
         readdata = alterdata
 
+    todolist = {}
     for key in keys(alterdata):
         if not '${' in key:
             continue
@@ -289,7 +290,13 @@
         ekey = expand(key, readdata)
         if key == ekey:
             continue
+        todolist[key] = ekey
 
+    # These two for loops are split for performance to maximise the
+    # usefulness of the expand cache
+
+    for key in todolist:
+        ekey = todolist[key]
         renameVar(key, ekey, alterdata)
 
 def expandData(alterdata, readdata = None):
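The expandKeys() change gathers every key rename first and applies them afterwards, so renaming variables does not invalidate the expand cache while it is still being consulted. A toy illustration of the same two-pass idea with a memoised substitution function (simplified stand-ins, not the bb.data API):

import re

variables = {"PN": "busybox", "FILES_${PN}": "/bin/busybox", "DESCRIPTION": "utils"}

_cache = {}
def expand(value):
    # Memoised ${VAR} substitution; the cache only stays valid while
    # the underlying variables are not being renamed.
    if value not in _cache:
        _cache[value] = re.sub(r"\$\{(\w+)\}",
                               lambda m: variables.get(m.group(1), m.group(0)),
                               value)
    return _cache[value]

# Pass 1: work out every rename while the cache is trustworthy.
todolist = {}
for key in list(variables):
    if "${" not in key:
        continue
    ekey = expand(key)
    if ekey != key:
        todolist[key] = ekey

# Pass 2: apply the renames once no further expansion is needed.
for key, ekey in todolist.items():
    variables[ekey] = variables.pop(key)

print(variables)   # {'PN': 'busybox', 'DESCRIPTION': 'utils', 'FILES_busybox': '/bin/busybox'}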
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index bbff516ffc..c34405738b 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -135,26 +135,27 @@ def go(d):
     for u in urldata:
         ud = urldata[u]
         m = ud.method
-        if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
-            # File already present along with md5 stamp file
-            # Touch md5 file to show activity
-            os.utime(ud.md5, None)
-            continue
-        lf = open(ud.lockfile, "a+")
-        fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
-        if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
-            # If someone else fetched this before we got the lock,
-            # notice and don't try again
-            os.utime(ud.md5, None)
-            fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
-            lf.close
-            continue
-        m.go(u, ud, d)
-        if ud.localfile and not m.forcefetch(u, ud, d):
-            Fetch.write_md5sum(u, ud, d)
-        fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
-        lf.close
-
+        if ud.localfile:
+            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
+                # File already present along with md5 stamp file
+                # Touch md5 file to show activity
+                os.utime(ud.md5, None)
+                continue
+            lf = open(ud.lockfile, "a+")
+            fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
+            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
+                # If someone else fetched this before we got the lock,
+                # notice and don't try again
+                os.utime(ud.md5, None)
+                fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
+                lf.close
+                continue
+        m.go(u, ud, d)
+        if ud.localfile:
+            if not m.forcefetch(u, ud, d):
+                Fetch.write_md5sum(u, ud, d)
+            fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
+            lf.close
 
 def localpaths(d):
     """
@@ -339,7 +340,7 @@ class Fetch(object):
         pn = data.getVar("PN", d, 1)
 
         if pn:
-            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)
+            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
 
         return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
     getSRCDate = staticmethod(getSRCDate)
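The fetch loop above takes a per-download lock and then re-checks the md5 stamp, so a parallel bitbake that fetched the file first is detected instead of being downloaded again. A minimal sketch of that lock-then-recheck pattern with fcntl (hypothetical paths and download callable, not the fetcher's real interface):

import fcntl
import os

def fetch_once(target, lockfile, download):
    """Download `target` unless another process already did, guarded by `lockfile`."""
    if os.path.exists(target + ".md5"):
        os.utime(target + ".md5", None)   # stamp exists: just record activity
        return
    lf = open(lockfile, "a+")
    try:
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
        if os.path.exists(target + ".md5"):
            # Someone else fetched it while we waited for the lock.
            os.utime(target + ".md5", None)
            return
        download(target)
        open(target + ".md5", "w").close()   # write the stamp last
    finally:
        fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
        lf.close()

Note that the sketch calls lf.close(); the hunk carries over the original lf.close without parentheses, which only references the method rather than closing the file.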
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index ca12efe158..af8543ab34 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -74,11 +74,14 @@ class Svn(Fetch):
             ud.revision = ""
         else:
             rev = data.getVar("SRCREV", d, 0)
-            if "get_srcrev" in rev:
+            if rev and "get_srcrev" in rev:
                 ud.revision = self.latest_revision(url, ud, d)
-            else:
+                ud.date = ""
+            elif rev:
                 ud.revision = rev
                 ud.date = ""
+            else:
+                ud.revision = ""
 
         ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
 
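The svn.py hunk stops assuming SRCREV is set before testing it for "get_srcrev" and adds an explicit fallback branch. A condensed sketch of the resulting three-way choice (illustrative helper, not the fetcher's real interface):

def pick_revision(srcrev, latest_revision):
    """Return (revision, date_cleared) following the same branching as the hunk."""
    if srcrev and "get_srcrev" in srcrev:
        return latest_revision(), True    # ask the server for the newest revision
    elif srcrev:
        return srcrev, True               # an explicit revision pins the checkout
    else:
        return "", False                  # fall back to date-based fetching

print(pick_revision(None, lambda: "1234"))                            # ('', False)
print(pick_revision("${@bb.fetch.get_srcrev(d)}", lambda: "1234"))    # ('1234', True)
print(pick_revision("1200", lambda: "1234"))                          # ('1200', True)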
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index aaa262d3e2..0f19f9a5d5 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -176,15 +176,23 @@ def handle(fn, d, include = 0):
         if bb.event.register(var,handler) == bb.event.Registered:
             all_handlers[var] = handler
 
+    tasklist = {}
     for var in data.getVar('__BBTASKS', d) or []:
+        if var not in tasklist:
+            tasklist[var] = []
         deps = data.getVarFlag(var, 'deps', d) or []
+        for p in deps:
+            if p not in tasklist[var]:
+                tasklist[var].append(p)
+
         postdeps = data.getVarFlag(var, 'postdeps', d) or []
-        bb.build.add_task(var, deps, d)
         for p in postdeps:
-            pdeps = data.getVarFlag(p, 'deps', d) or []
-            pdeps.append(var)
-            data.setVarFlag(p, 'deps', pdeps, d)
-            bb.build.add_task(p, pdeps, d)
+            if p not in tasklist:
+                tasklist[p] = []
+            if var not in tasklist[p]:
+                tasklist[p].append(var)
+
+    bb.build.add_tasks(tasklist, d)
 
     # now add the handlers
     if not len(all_handlers) == 0:
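The parser now accumulates both the deps and the inverted postdeps into a single tasklist dict and hands it to bb.build.add_tasks() in one call. A small sketch of that inversion, with plain dicts standing in for the metadata flags (illustrative task names):

# Illustrative flags: each task lists its 'deps' and 'postdeps'.
task_flags = {
    "do_patch": {"deps": [], "postdeps": []},
    "do_configure": {"deps": ["do_patch"], "postdeps": []},
    "do_deploy": {"deps": [], "postdeps": ["do_build"]},
    "do_build": {"deps": ["do_configure"], "postdeps": []},
}

tasklist = {}
for var, flags in task_flags.items():
    tasklist.setdefault(var, [])
    for p in flags["deps"]:
        if p not in tasklist[var]:
            tasklist[var].append(p)
    # A postdep on do_build means do_build must depend on this task,
    # so the edge is recorded the other way round.
    for p in flags["postdeps"]:
        tasklist.setdefault(p, [])
        if var not in tasklist[p]:
            tasklist[p].append(var)

print(tasklist["do_build"])   # ['do_deploy', 'do_configure']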
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index c55a58da2b..3dfae219d2 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -137,7 +137,7 @@ class RunQueue:
                 dep = taskData.fn_index[depdata]
                 depends.append(taskData.gettask_id(dep, idepend.split(":")[1]))
 
-        def add_recursive_build(depid):
+        def add_recursive_build(depid, depfnid):
             """
             Add build depends of depid to depends
             (if we've not see it before)
@@ -150,26 +150,28 @@ class RunQueue:
             depdata = taskData.build_targets[depid][0]
             if depdata is not None:
                 dep = taskData.fn_index[depdata]
+                idepends = []
                 # Need to avoid creating new tasks here
                 taskid = taskData.gettask_id(dep, taskname, False)
                 if taskid is not None:
                     depends.append(taskid)
                     fnid = taskData.tasks_fnid[taskid]
+                    idepends = taskData.tasks_idepends[taskid]
+                    #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
                 else:
                     fnid = taskData.getfn_id(dep)
                 for nextdepid in taskData.depids[fnid]:
                     if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid)
+                        add_recursive_build(nextdepid, fnid)
                 for nextdepid in taskData.rdepids[fnid]:
                     if nextdepid not in rdep_seen:
-                        add_recursive_run(nextdepid)
-                idepends = taskData.tasks_idepends[depid]
+                        add_recursive_run(nextdepid, fnid)
                 for idepend in idepends:
                     nextdepid = int(idepend.split(":")[0])
                     if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid)
+                        add_recursive_build(nextdepid, fnid)
 
-        def add_recursive_run(rdepid):
+        def add_recursive_run(rdepid, depfnid):
             """
             Add runtime depends of rdepid to depends
             (if we've not see it before)
@@ -182,24 +184,26 @@ class RunQueue:
             depdata = taskData.run_targets[rdepid][0]
             if depdata is not None:
                 dep = taskData.fn_index[depdata]
+                idepends = []
                 # Need to avoid creating new tasks here
                 taskid = taskData.gettask_id(dep, taskname, False)
                 if taskid is not None:
                     depends.append(taskid)
                     fnid = taskData.tasks_fnid[taskid]
+                    idepends = taskData.tasks_idepends[taskid]
+                    #print "Added %s (%s) due to %s" % (taskid, taskData.fn_index[fnid], taskData.fn_index[depfnid])
                 else:
                     fnid = taskData.getfn_id(dep)
                 for nextdepid in taskData.depids[fnid]:
                     if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid)
+                        add_recursive_build(nextdepid, fnid)
                 for nextdepid in taskData.rdepids[fnid]:
                     if nextdepid not in rdep_seen:
-                        add_recursive_run(nextdepid)
-                idepends = taskData.tasks_idepends[rdepid]
+                        add_recursive_run(nextdepid, fnid)
                 for idepend in idepends:
                     nextdepid = int(idepend.split(":")[0])
                     if nextdepid not in dep_seen:
-                        add_recursive_build(nextdepid)
+                        add_recursive_build(nextdepid, fnid)
 
 
         # Resolve Recursive Runtime Depends
@@ -210,12 +214,12 @@ class RunQueue:
             rdep_seen = []
             idep_seen = []
             for depid in taskData.depids[fnid]:
-                add_recursive_build(depid)
+                add_recursive_build(depid, fnid)
             for rdepid in taskData.rdepids[fnid]:
-                add_recursive_run(rdepid)
+                add_recursive_run(rdepid, fnid)
             for idepend in idepends:
                 depid = int(idepend.split(":")[0])
-                add_recursive_build(depid)
+                add_recursive_build(depid, fnid)
 
             #Prune self references
             if task in depends:
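The runqueue hunks thread the calling file id (depfnid) through add_recursive_build()/add_recursive_run() so the commented-out debug print can say which recipe pulled a task in, and they take the inter-task depends from the task actually found rather than from the original depid. The underlying pattern is a recursive walk over depends with seen-lists to stop revisiting nodes; a stripped-down sketch with a hypothetical data layout:

# Hypothetical: depid -> list of depids it depends on.
depends_graph = {0: [1, 2], 1: [2], 2: [0]}   # note the 2 -> 0 cycle

def collect_depends(start):
    """Gather every reachable depid once, tolerating cycles via a seen list."""
    collected = []
    dep_seen = []

    def add_recursive_build(depid, depfnid):
        if depid in dep_seen:
            return
        dep_seen.append(depid)
        collected.append(depid)
        for nextdepid in depends_graph.get(depid, []):
            if nextdepid not in dep_seen:
                # depfnid is only carried along for debug messages,
                # mirroring the hunk above.
                add_recursive_build(nextdepid, depid)

    add_recursive_build(start, None)
    return collected

print(collect_depends(0))   # [0, 1, 2]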
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index f448b5b666..902cc140ef 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -450,10 +450,12 @@ class TaskData:
         self.add_runtime_target(fn, item)
         self.add_tasks(fn, dataCache)
 
-    def fail_fnid(self, fnid):
+    def fail_fnid(self, fnid, missing_list = []):
         """
         Mark a file as failed (unbuildable)
         Remove any references from build and runtime provider lists
+
+        missing_list, A list of missing requirements for this target
         """
         if fnid in self.failed_fnids:
             return
@@ -463,14 +465,14 @@
             if fnid in self.build_targets[target]:
                 self.build_targets[target].remove(fnid)
                 if len(self.build_targets[target]) == 0:
-                    self.remove_buildtarget(target)
+                    self.remove_buildtarget(target, missing_list)
         for target in self.run_targets:
             if fnid in self.run_targets[target]:
                 self.run_targets[target].remove(fnid)
                 if len(self.run_targets[target]) == 0:
-                    self.remove_runtarget(target)
+                    self.remove_runtarget(target, missing_list)
 
-    def remove_buildtarget(self, targetid):
+    def remove_buildtarget(self, targetid, missing_list = []):
         """
         Mark a build target as failed (unbuildable)
         Trigger removal of any files that have this as a dependency
@@ -479,21 +481,21 @@
         self.failed_deps.append(targetid)
         dependees = self.get_dependees(targetid)
         for fnid in dependees:
-            self.fail_fnid(fnid)
+            self.fail_fnid(fnid, [self.build_names_index[targetid]]+missing_list)
         if self.abort and targetid in self.external_targets:
-            bb.msg.error(bb.msg.domain.Provider, "No buildable providers available for required build target %s" % self.build_names_index[targetid])
+            bb.msg.error(bb.msg.domain.Provider, "No buildable providers available for required build target %s ('%s')" % (self.build_names_index[targetid], missing_list))
             raise bb.providers.NoProvider
 
-    def remove_runtarget(self, targetid):
+    def remove_runtarget(self, targetid, missing_list = []):
        """
         Mark a run target as failed (unbuildable)
         Trigger removal of any files that have this as a dependency
         """
-        bb.msg.note(1, bb.msg.domain.Provider, "Removing failed runtime build target %s" % self.run_names_index[targetid])
+        bb.msg.note(1, bb.msg.domain.Provider, "Removing failed runtime build target %s ('%s')" % (self.run_names_index[targetid], missing_list))
         self.failed_rdeps.append(targetid)
         dependees = self.get_rdependees(targetid)
         for fnid in dependees:
-            self.fail_fnid(fnid)
+            self.fail_fnid(fnid, [self.run_names_index[targetid]]+missing_list)
 
     def add_unresolved(self, cfgData, dataCache):
         """
@@ -529,14 +531,26 @@
         """
         bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
         bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
+
         bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
         bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
+
         bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
-        for target in self.build_targets.keys():
-            bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.build_names_index[target], self.build_targets[target]))
+        for buildid in range(len(self.build_names_index)):
+            target = self.build_names_index[buildid]
+            targets = "None"
+            if buildid in self.build_targets:
+                targets = self.build_targets[buildid]
+            bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
+
         bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
-        for target in self.run_targets.keys():
-            bb.msg.debug(3, bb.msg.domain.TaskData, " %s: %s" % (self.run_names_index[target], self.run_targets[target]))
+        for runid in range(len(self.run_names_index)):
+            target = self.run_names_index[runid]
+            targets = "None"
+            if runid in self.run_targets:
+                targets = self.run_targets[runid]
+            bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
+
         bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
         for task in range(len(self.tasks_name)):
             bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
@@ -544,6 +558,7 @@
             self.fn_index[self.tasks_fnid[task]],
             self.tasks_name[task],
             self.tasks_tdepends[task]))
+
         bb.msg.debug(3, bb.msg.domain.TaskData, "runtime ids (per fn):")
         for fnid in self.rdepids:
             bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
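The taskdata.py changes thread a missing_list through the failure paths, so when a target finally aborts the build the error can report the chain of unsatisfied providers rather than just the top-level name. A toy sketch of how that list grows as failures cascade (hypothetical recipe names, not the TaskData API):

def fail_target(target, dependers, missing_list=None, failed=None):
    """Mark `target` failed and cascade to everything that depended on it,
    extending missing_list so each level records why it became unbuildable."""
    missing_list = missing_list or []
    failed = failed if failed is not None else {}
    failed[target] = missing_list
    for depender in dependers.get(target, []):
        if depender not in failed:
            fail_target(depender, dependers, [target] + missing_list, failed)
    return failed

# 'libfoo' is unbuildable; 'bar' depends on it and 'baz-image' depends on 'bar'.
dependers = {"libfoo": ["bar"], "bar": ["baz-image"]}
print(fail_target("libfoo", dependers))
# {'libfoo': [], 'bar': ['libfoo'], 'baz-image': ['bar', 'libfoo']}

The sketch also sidesteps the shared mutable default that the hunk's missing_list = [] signature technically introduces.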