author     Richard Purdie <richard.purdie@linuxfoundation.org>   2019-07-29 17:28:20 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>   2019-08-06 11:21:32 +0100
commit     4fbb862cdcdcfa44da83f9a7e7a74ab518ef67d0 (patch)
tree       0fa843c4a2f3f5c7dc69907cb4e40d47c9443109
parent     40a5e193c4ba45c928fccd899415ea56b5417725 (diff)
download   poky-4fbb862cdcdcfa44da83f9a7e7a74ab518ef67d0.tar.gz
bitbake: siggen: Clean up task reference formats
Currently siggen uses the format "<filename>.<taskname>" to reference tasks,
whilst runqueue uses "<filename>:<taskname>". This converts everything to use
":" as the separator.
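
For illustration, a minimal sketch of the two reference formats; the filename
and task name below are hypothetical examples, not taken from this commit:

```python
# Hypothetical recipe filename and task name, for illustration only.
fn = "/meta/recipes-core/busybox/busybox_1.31.0.bb"
task = "do_compile"

# Old siggen-style reference: "<filename>.<taskname>"
old_key = fn + "." + task   # .../busybox_1.31.0.bb.do_compile

# New unified task id ("tid"), the format runqueue already used:
# "<filename>:<taskname>"
tid = fn + ":" + task       # .../busybox_1.31.0.bb:do_compile
```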
This is an API-breaking change since the cache is affected, as are siginfo files
and any custom signature handlers such as those in OE-Core (see the sketch after
the siggen.py diff below).
Ultimately this will let us clean up and use the accessor functions from runqueue,
removing all the ".rsplit(".", 1)[0]"-style code currently scattered all over the place.
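
A rough sketch of that cleanup: the old code split the "." form by hand, whereas
the new form is parsed through the runqueue accessors. The helper bodies below
are simplified stand-ins and the keys are hypothetical; the real bb.runqueue
helpers also cope with multiconfig ("mc:<name>:...") tids.

```python
# Simplified stand-ins for bb.runqueue.fn_from_tid()/taskname_from_tid().
def fn_from_tid(tid):
    return tid.rsplit(":", 1)[0]

def taskname_from_tid(tid):
    return tid.rsplit(":", 1)[1]

# Old pattern, hand-splitting "<filename>.<taskname>" keys:
old_key = "/path/to/recipe.bb.do_compile"      # hypothetical key
fn_old = old_key.rsplit(".", 1)[0]             # "/path/to/recipe.bb"

# New pattern, one ":" separator parsed via the accessors:
tid = "/path/to/recipe.bb:do_compile"          # hypothetical tid
fn, taskname = fn_from_tid(tid), taskname_from_tid(tid)
print(fn, taskname)                            # /path/to/recipe.bb do_compile
```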
Once a standard format is used everywhere, we can update the code over time to be
more optimal.
(Bitbake rev: 07e539e1c566ca3434901e1a00335cb76c69d496)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r--  bitbake/lib/bb/cache.py       |   2
-rw-r--r--  bitbake/lib/bb/data.py        |   2
-rw-r--r--  bitbake/lib/bb/runqueue.py    |  19
-rw-r--r--  bitbake/lib/bb/siggen.py      | 152
-rw-r--r--  bitbake/lib/bb/tests/data.py  |   2
5 files changed, 82 insertions, 95 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 233f994279..b6f7da5920 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -220,7 +220,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
220 | 220 | ||
221 | cachedata.hashfn[fn] = self.hashfilename | 221 | cachedata.hashfn[fn] = self.hashfilename |
222 | for task, taskhash in self.basetaskhashes.items(): | 222 | for task, taskhash in self.basetaskhashes.items(): |
223 | identifier = '%s.%s' % (fn, task) | 223 | identifier = '%s:%s' % (fn, task) |
224 | cachedata.basetaskhash[identifier] = taskhash | 224 | cachedata.basetaskhash[identifier] = taskhash |
225 | 225 | ||
226 | cachedata.inherits[fn] = self.inherits | 226 | cachedata.inherits[fn] = self.inherits |
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 92ef405304..443615b977 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -422,7 +422,7 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
422 | var = lookupcache[dep] | 422 | var = lookupcache[dep] |
423 | if var is not None: | 423 | if var is not None: |
424 | data = data + str(var) | 424 | data = data + str(var) |
425 | k = fn + "." + task | 425 | k = fn + ":" + task |
426 | basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest() | 426 | basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest() |
427 | taskdeps[task] = alldeps | 427 | taskdeps[task] = alldeps |
428 | 428 | ||
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index f0f95f9b5e..bb61087359 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1174,10 +1174,9 @@ class RunQueueData:
1174 | def prepare_task_hash(self, tid): | 1174 | def prepare_task_hash(self, tid): |
1175 | procdep = [] | 1175 | procdep = [] |
1176 | for dep in self.runtaskentries[tid].depends: | 1176 | for dep in self.runtaskentries[tid].depends: |
1177 | procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep)) | 1177 | procdep.append(dep) |
1178 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 1178 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, procdep, self.dataCaches[mc_from_tid(tid)]) |
1179 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.dataCaches[mc]) | 1179 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) |
1180 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(taskfn + "." + taskname) | ||
1181 | 1180 | ||
1182 | def dump_data(self): | 1181 | def dump_data(self): |
1183 | """ | 1182 | """ |
@@ -1401,7 +1400,7 @@ class RunQueue:
1401 | sq_data['hashfn'][tid] = self.rqdata.dataCaches[mc].hashfn[taskfn] | 1400 | sq_data['hashfn'][tid] = self.rqdata.dataCaches[mc].hashfn[taskfn] |
1402 | sq_data['unihash'][tid] = self.rqdata.runtaskentries[tid].unihash | 1401 | sq_data['unihash'][tid] = self.rqdata.runtaskentries[tid].unihash |
1403 | 1402 | ||
1404 | valid_ids = self.validate_hash(sq_data, data, siginfo, currentcount) | 1403 | valid = self.validate_hash(sq_data, data, siginfo, currentcount) |
1405 | 1404 | ||
1406 | return valid | 1405 | return valid |
1407 | 1406 | ||
@@ -2152,8 +2151,7 @@ class RunQueueExecute:
2152 | if unihash != self.rqdata.runtaskentries[tid].unihash: | 2151 | if unihash != self.rqdata.runtaskentries[tid].unihash: |
2153 | logger.info("Task %s unihash changed to %s" % (tid, unihash)) | 2152 | logger.info("Task %s unihash changed to %s" % (tid, unihash)) |
2154 | self.rqdata.runtaskentries[tid].unihash = unihash | 2153 | self.rqdata.runtaskentries[tid].unihash = unihash |
2155 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 2154 | bb.parse.siggen.set_unihash(tid, unihash) |
2156 | bb.parse.siggen.set_unihash(taskfn + "." + taskname, unihash) | ||
2157 | 2155 | ||
2158 | # Work out all tasks which depend on this one | 2156 | # Work out all tasks which depend on this one |
2159 | total = set() | 2157 | total = set() |
@@ -2177,12 +2175,11 @@ class RunQueueExecute:
2177 | continue | 2175 | continue |
2178 | procdep = [] | 2176 | procdep = [] |
2179 | for dep in self.rqdata.runtaskentries[tid].depends: | 2177 | for dep in self.rqdata.runtaskentries[tid].depends: |
2180 | procdep.append(fn_from_tid(dep) + "." + taskname_from_tid(dep)) | 2178 | procdep.append(dep) |
2181 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | ||
2182 | orighash = self.rqdata.runtaskentries[tid].hash | 2179 | orighash = self.rqdata.runtaskentries[tid].hash |
2183 | self.rqdata.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(taskfn, taskname, procdep, self.rqdata.dataCaches[mc]) | 2180 | self.rqdata.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, procdep, self.rqdata.dataCaches[mc_from_tid(tid)]) |
2184 | origuni = self.rqdata.runtaskentries[tid].unihash | 2181 | origuni = self.rqdata.runtaskentries[tid].unihash |
2185 | self.rqdata.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(taskfn + "." + taskname) | 2182 | self.rqdata.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) |
2186 | logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, self.rqdata.runtaskentries[tid].hash, origuni, self.rqdata.runtaskentries[tid].unihash)) | 2183 | logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, self.rqdata.runtaskentries[tid].hash, origuni, self.rqdata.runtaskentries[tid].unihash)) |
2187 | next |= self.rqdata.runtaskentries[tid].revdeps | 2184 | next |= self.rqdata.runtaskentries[tid].revdeps |
2188 | changed.add(tid) | 2185 | changed.add(tid) |
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 368209807f..912c92c8be 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -46,13 +46,12 @@ class SignatureGenerator(object):
46 | def finalise(self, fn, d, varient): | 46 | def finalise(self, fn, d, varient): |
47 | return | 47 | return |
48 | 48 | ||
49 | def get_unihash(self, task): | 49 | def get_unihash(self, tid): |
50 | return self.taskhash[task] | 50 | return self.taskhash[tid] |
51 | 51 | ||
52 | def get_taskhash(self, fn, task, deps, dataCache): | 52 | def get_taskhash(self, tid, deps, dataCache): |
53 | k = fn + "." + task | 53 | self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest() |
54 | self.taskhash[k] = hashlib.sha256(k.encode("utf-8")).hexdigest() | 54 | return self.taskhash[tid] |
55 | return self.taskhash[k] | ||
56 | 55 | ||
57 | def writeout_file_checksum_cache(self): | 56 | def writeout_file_checksum_cache(self): |
58 | """Write/update the file checksum cache onto disk""" | 57 | """Write/update the file checksum cache onto disk""" |
@@ -106,7 +105,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
106 | self.taints = {} | 105 | self.taints = {} |
107 | self.gendeps = {} | 106 | self.gendeps = {} |
108 | self.lookupcache = {} | 107 | self.lookupcache = {} |
109 | self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*") | ||
110 | self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) | 108 | self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) |
111 | self.taskwhitelist = None | 109 | self.taskwhitelist = None |
112 | self.init_rundepcheck(data) | 110 | self.init_rundepcheck(data) |
@@ -135,16 +133,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
135 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn) | 133 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn) |
136 | 134 | ||
137 | for task in tasklist: | 135 | for task in tasklist: |
138 | k = fn + "." + task | 136 | tid = fn + ":" + task |
139 | if not ignore_mismatch and k in self.basehash and self.basehash[k] != basehash[k]: | 137 | if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: |
140 | bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], basehash[k])) | 138 | bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) |
141 | bb.error("The following commands may help:") | 139 | bb.error("The following commands may help:") |
142 | cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task) | 140 | cmd = "$ bitbake %s -c%s" % (d.getVar('PN'), task) |
143 | # Make sure sigdata is dumped before run printdiff | 141 | # Make sure sigdata is dumped before run printdiff |
144 | bb.error("%s -Snone" % cmd) | 142 | bb.error("%s -Snone" % cmd) |
145 | bb.error("Then:") | 143 | bb.error("Then:") |
146 | bb.error("%s -Sprintdiff\n" % cmd) | 144 | bb.error("%s -Sprintdiff\n" % cmd) |
147 | self.basehash[k] = basehash[k] | 145 | self.basehash[tid] = basehash[tid] |
148 | 146 | ||
149 | self.taskdeps[fn] = taskdeps | 147 | self.taskdeps[fn] = taskdeps |
150 | self.gendeps[fn] = gendeps | 148 | self.gendeps[fn] = gendeps |
@@ -171,7 +169,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
171 | # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) | 169 | # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) |
172 | 170 | ||
173 | for task in taskdeps: | 171 | for task in taskdeps: |
174 | d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task]) | 172 | d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task]) |
175 | 173 | ||
176 | def rundep_check(self, fn, recipename, task, dep, depname, dataCache): | 174 | def rundep_check(self, fn, recipename, task, dep, depname, dataCache): |
177 | # Return True if we should keep the dependency, False to drop it | 175 | # Return True if we should keep the dependency, False to drop it |
@@ -191,33 +189,26 @@ class SignatureGeneratorBasic(SignatureGenerator):
191 | pass | 189 | pass |
192 | return taint | 190 | return taint |
193 | 191 | ||
194 | def get_taskhash(self, fn, task, deps, dataCache): | 192 | def get_taskhash(self, tid, deps, dataCache): |
195 | 193 | ||
196 | mc = '' | 194 | (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) |
197 | if fn.startswith('mc:'): | ||
198 | mc = fn.split(':')[1] | ||
199 | k = fn + "." + task | ||
200 | 195 | ||
201 | data = dataCache.basetaskhash[k] | 196 | data = dataCache.basetaskhash[tid] |
202 | self.basehash[k] = data | 197 | self.basehash[tid] = data |
203 | self.runtaskdeps[k] = [] | 198 | self.runtaskdeps[tid] = [] |
204 | self.file_checksum_values[k] = [] | 199 | self.file_checksum_values[tid] = [] |
205 | recipename = dataCache.pkg_fn[fn] | 200 | recipename = dataCache.pkg_fn[fn] |
206 | for dep in sorted(deps, key=clean_basepath): | 201 | for dep in sorted(deps, key=clean_basepath): |
207 | pkgname = self.pkgnameextract.search(dep).group('fn') | 202 | (depmc, _, deptaskname, depfn) = bb.runqueue.split_tid_mcfn(dep) |
208 | if mc: | 203 | if mc != depmc: |
209 | depmc = pkgname.split(':')[1] | ||
210 | if mc != depmc: | ||
211 | continue | ||
212 | if dep.startswith("mc:") and not mc: | ||
213 | continue | 204 | continue |
214 | depname = dataCache.pkg_fn[pkgname] | 205 | depname = dataCache.pkg_fn[depfn] |
215 | if not self.rundep_check(fn, recipename, task, dep, depname, dataCache): | 206 | if not self.rundep_check(fn, recipename, task, dep, depname, dataCache): |
216 | continue | 207 | continue |
217 | if dep not in self.taskhash: | 208 | if dep not in self.taskhash: |
218 | bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep) | 209 | bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep) |
219 | data = data + self.get_unihash(dep) | 210 | data = data + self.get_unihash(dep) |
220 | self.runtaskdeps[k].append(dep) | 211 | self.runtaskdeps[tid].append(dep) |
221 | 212 | ||
222 | if task in dataCache.file_checksums[fn]: | 213 | if task in dataCache.file_checksums[fn]: |
223 | if self.checksum_cache: | 214 | if self.checksum_cache: |
@@ -225,7 +216,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
225 | else: | 216 | else: |
226 | checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename) | 217 | checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename) |
227 | for (f,cs) in checksums: | 218 | for (f,cs) in checksums: |
228 | self.file_checksum_values[k].append((f,cs)) | 219 | self.file_checksum_values[tid].append((f,cs)) |
229 | if cs: | 220 | if cs: |
230 | data = data + cs | 221 | data = data + cs |
231 | 222 | ||
@@ -235,16 +226,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
235 | import uuid | 226 | import uuid |
236 | taint = str(uuid.uuid4()) | 227 | taint = str(uuid.uuid4()) |
237 | data = data + taint | 228 | data = data + taint |
238 | self.taints[k] = "nostamp:" + taint | 229 | self.taints[tid] = "nostamp:" + taint |
239 | 230 | ||
240 | taint = self.read_taint(fn, task, dataCache.stamp[fn]) | 231 | taint = self.read_taint(fn, task, dataCache.stamp[fn]) |
241 | if taint: | 232 | if taint: |
242 | data = data + taint | 233 | data = data + taint |
243 | self.taints[k] = taint | 234 | self.taints[tid] = taint |
244 | logger.warning("%s is tainted from a forced run" % k) | 235 | logger.warning("%s is tainted from a forced run" % tid) |
245 | 236 | ||
246 | h = hashlib.sha256(data.encode("utf-8")).hexdigest() | 237 | h = hashlib.sha256(data.encode("utf-8")).hexdigest() |
247 | self.taskhash[k] = h | 238 | self.taskhash[tid] = h |
248 | #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) | 239 | #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task]) |
249 | return h | 240 | return h |
250 | 241 | ||
@@ -262,15 +253,15 @@ class SignatureGeneratorBasic(SignatureGenerator):
262 | 253 | ||
263 | def dump_sigtask(self, fn, task, stampbase, runtime): | 254 | def dump_sigtask(self, fn, task, stampbase, runtime): |
264 | 255 | ||
265 | k = fn + "." + task | 256 | tid = fn + ":" + task |
266 | referencestamp = stampbase | 257 | referencestamp = stampbase |
267 | if isinstance(runtime, str) and runtime.startswith("customfile"): | 258 | if isinstance(runtime, str) and runtime.startswith("customfile"): |
268 | sigfile = stampbase | 259 | sigfile = stampbase |
269 | referencestamp = runtime[11:] | 260 | referencestamp = runtime[11:] |
270 | elif runtime and k in self.taskhash: | 261 | elif runtime and tid in self.taskhash: |
271 | sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k] | 262 | sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[tid] |
272 | else: | 263 | else: |
273 | sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k] | 264 | sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[tid] |
274 | 265 | ||
275 | bb.utils.mkdirhier(os.path.dirname(sigfile)) | 266 | bb.utils.mkdirhier(os.path.dirname(sigfile)) |
276 | 267 | ||
@@ -279,7 +270,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
279 | data['basewhitelist'] = self.basewhitelist | 270 | data['basewhitelist'] = self.basewhitelist |
280 | data['taskwhitelist'] = self.taskwhitelist | 271 | data['taskwhitelist'] = self.taskwhitelist |
281 | data['taskdeps'] = self.taskdeps[fn][task] | 272 | data['taskdeps'] = self.taskdeps[fn][task] |
282 | data['basehash'] = self.basehash[k] | 273 | data['basehash'] = self.basehash[tid] |
283 | data['gendeps'] = {} | 274 | data['gendeps'] = {} |
284 | data['varvals'] = {} | 275 | data['varvals'] = {} |
285 | data['varvals'][task] = self.lookupcache[fn][task] | 276 | data['varvals'][task] = self.lookupcache[fn][task] |
@@ -289,30 +280,30 @@ class SignatureGeneratorBasic(SignatureGenerator):
289 | data['gendeps'][dep] = self.gendeps[fn][dep] | 280 | data['gendeps'][dep] = self.gendeps[fn][dep] |
290 | data['varvals'][dep] = self.lookupcache[fn][dep] | 281 | data['varvals'][dep] = self.lookupcache[fn][dep] |
291 | 282 | ||
292 | if runtime and k in self.taskhash: | 283 | if runtime and tid in self.taskhash: |
293 | data['runtaskdeps'] = self.runtaskdeps[k] | 284 | data['runtaskdeps'] = self.runtaskdeps[tid] |
294 | data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]] | 285 | data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]] |
295 | data['runtaskhashes'] = {} | 286 | data['runtaskhashes'] = {} |
296 | for dep in data['runtaskdeps']: | 287 | for dep in data['runtaskdeps']: |
297 | data['runtaskhashes'][dep] = self.get_unihash(dep) | 288 | data['runtaskhashes'][dep] = self.get_unihash(dep) |
298 | data['taskhash'] = self.taskhash[k] | 289 | data['taskhash'] = self.taskhash[tid] |
299 | 290 | ||
300 | taint = self.read_taint(fn, task, referencestamp) | 291 | taint = self.read_taint(fn, task, referencestamp) |
301 | if taint: | 292 | if taint: |
302 | data['taint'] = taint | 293 | data['taint'] = taint |
303 | 294 | ||
304 | if runtime and k in self.taints: | 295 | if runtime and tid in self.taints: |
305 | if 'nostamp:' in self.taints[k]: | 296 | if 'nostamp:' in self.taints[tid]: |
306 | data['taint'] = self.taints[k] | 297 | data['taint'] = self.taints[tid] |
307 | 298 | ||
308 | computed_basehash = calc_basehash(data) | 299 | computed_basehash = calc_basehash(data) |
309 | if computed_basehash != self.basehash[k]: | 300 | if computed_basehash != self.basehash[tid]: |
310 | bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k)) | 301 | bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[tid], tid)) |
311 | if runtime and k in self.taskhash: | 302 | if runtime and tid in self.taskhash: |
312 | computed_taskhash = calc_taskhash(data) | 303 | computed_taskhash = calc_taskhash(data) |
313 | if computed_taskhash != self.taskhash[k]: | 304 | if computed_taskhash != self.taskhash[tid]: |
314 | bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k)) | 305 | bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) |
315 | sigfile = sigfile.replace(self.taskhash[k], computed_taskhash) | 306 | sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) |
316 | 307 | ||
317 | fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") | 308 | fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.") |
318 | try: | 309 | try: |
@@ -332,34 +323,33 @@ class SignatureGeneratorBasic(SignatureGenerator):
332 | if fn in self.taskdeps: | 323 | if fn in self.taskdeps: |
333 | for task in self.taskdeps[fn]: | 324 | for task in self.taskdeps[fn]: |
334 | tid = fn + ":" + task | 325 | tid = fn + ":" + task |
335 | (mc, _, _) = bb.runqueue.split_tid(tid) | 326 | mc = bb.runqueue.mc_from_tid(tid) |
336 | k = fn + "." + task | 327 | if tid not in self.taskhash: |
337 | if k not in self.taskhash: | ||
338 | continue | 328 | continue |
339 | if dataCaches[mc].basetaskhash[k] != self.basehash[k]: | 329 | if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]: |
340 | bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k) | 330 | bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid) |
341 | bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k])) | 331 | bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid])) |
342 | self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True) | 332 | self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True) |
343 | 333 | ||
344 | class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | 334 | class SignatureGeneratorBasicHash(SignatureGeneratorBasic): |
345 | name = "basichash" | 335 | name = "basichash" |
346 | 336 | ||
347 | def get_stampfile_hash(self, task): | 337 | def get_stampfile_hash(self, tid): |
348 | if task in self.taskhash: | 338 | if tid in self.taskhash: |
349 | return self.taskhash[task] | 339 | return self.taskhash[tid] |
350 | 340 | ||
351 | # If task is not in basehash, then error | 341 | # If task is not in basehash, then error |
352 | return self.basehash[task] | 342 | return self.basehash[tid] |
353 | 343 | ||
354 | def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False): | 344 | def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False): |
355 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | 345 | if taskname != "do_setscene" and taskname.endswith("_setscene"): |
356 | k = fn + "." + taskname[:-9] | 346 | tid = fn + ":" + taskname[:-9] |
357 | else: | 347 | else: |
358 | k = fn + "." + taskname | 348 | tid = fn + ":" + taskname |
359 | if clean: | 349 | if clean: |
360 | h = "*" | 350 | h = "*" |
361 | else: | 351 | else: |
362 | h = self.get_stampfile_hash(k) | 352 | h = self.get_stampfile_hash(tid) |
363 | 353 | ||
364 | return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') | 354 | return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') |
365 | 355 | ||
@@ -378,32 +368,32 @@ class SignatureGeneratorUniHashMixIn(object):
378 | self.server, self.method = data[:2] | 368 | self.server, self.method = data[:2] |
379 | super().set_taskdata(data[2:]) | 369 | super().set_taskdata(data[2:]) |
380 | 370 | ||
381 | def __get_task_unihash_key(self, task): | 371 | def __get_task_unihash_key(self, tid): |
382 | # TODO: The key only *needs* to be the taskhash, the task is just | 372 | # TODO: The key only *needs* to be the taskhash, the tid is just |
383 | # convenient | 373 | # convenient |
384 | return '%s:%s' % (task, self.taskhash[task]) | 374 | return '%s:%s' % (tid, self.taskhash[tid]) |
385 | 375 | ||
386 | def get_stampfile_hash(self, task): | 376 | def get_stampfile_hash(self, tid): |
387 | if task in self.taskhash: | 377 | if tid in self.taskhash: |
388 | # If a unique hash is reported, use it as the stampfile hash. This | 378 | # If a unique hash is reported, use it as the stampfile hash. This |
389 | # ensures that if a task won't be re-run if the taskhash changes, | 379 | # ensures that if a task won't be re-run if the taskhash changes, |
390 | # but it would result in the same output hash | 380 | # but it would result in the same output hash |
391 | unihash = self.unitaskhashes.get(self.__get_task_unihash_key(task), None) | 381 | unihash = self.unitaskhashes.get(self.__get_task_unihash_key(tid), None) |
392 | if unihash is not None: | 382 | if unihash is not None: |
393 | return unihash | 383 | return unihash |
394 | 384 | ||
395 | return super().get_stampfile_hash(task) | 385 | return super().get_stampfile_hash(tid) |
396 | 386 | ||
397 | def set_unihash(self, task, unihash): | 387 | def set_unihash(self, tid, unihash): |
398 | self.unitaskhashes[self.__get_task_unihash_key(task)] = unihash | 388 | self.unitaskhashes[self.__get_task_unihash_key(tid)] = unihash |
399 | 389 | ||
400 | def get_unihash(self, task): | 390 | def get_unihash(self, tid): |
401 | import urllib | 391 | import urllib |
402 | import json | 392 | import json |
403 | 393 | ||
404 | taskhash = self.taskhash[task] | 394 | taskhash = self.taskhash[tid] |
405 | 395 | ||
406 | key = self.__get_task_unihash_key(task) | 396 | key = self.__get_task_unihash_key(tid) |
407 | 397 | ||
408 | # TODO: This cache can grow unbounded. It probably only needs to keep | 398 | # TODO: This cache can grow unbounded. It probably only needs to keep |
409 | # for each task | 399 | # for each task |
@@ -428,7 +418,7 @@ class SignatureGeneratorUniHashMixIn(object):
428 | 418 | ||
429 | try: | 419 | try: |
430 | url = '%s/v1/equivalent?%s' % (self.server, | 420 | url = '%s/v1/equivalent?%s' % (self.server, |
431 | urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[task]})) | 421 | urllib.parse.urlencode({'method': self.method, 'taskhash': self.taskhash[tid]})) |
432 | 422 | ||
433 | request = urllib.request.Request(url) | 423 | request = urllib.request.Request(url) |
434 | response = urllib.request.urlopen(request) | 424 | response = urllib.request.urlopen(request) |
@@ -441,9 +431,9 @@ class SignatureGeneratorUniHashMixIn(object):
441 | # A unique hash equal to the taskhash is not very interesting, | 431 | # A unique hash equal to the taskhash is not very interesting, |
442 | # so it is reported it at debug level 2. If they differ, that | 432 | # so it is reported it at debug level 2. If they differ, that |
443 | # is much more interesting, so it is reported at debug level 1 | 433 | # is much more interesting, so it is reported at debug level 1 |
444 | bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, task, self.server)) | 434 | bb.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) |
445 | else: | 435 | else: |
446 | bb.debug(2, 'No reported unihash for %s:%s from %s' % (task, taskhash, self.server)) | 436 | bb.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) |
447 | except urllib.error.URLError as e: | 437 | except urllib.error.URLError as e: |
448 | bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 438 | bb.warn('Failure contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
449 | except (KeyError, json.JSONDecodeError) as e: | 439 | except (KeyError, json.JSONDecodeError) as e: |
@@ -464,7 +454,7 @@ class SignatureGeneratorUniHashMixIn(object):
464 | report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' | 454 | report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' |
465 | tempdir = d.getVar('T') | 455 | tempdir = d.getVar('T') |
466 | fn = d.getVar('BB_FILENAME') | 456 | fn = d.getVar('BB_FILENAME') |
467 | key = fn + '.do_' + task + ':' + taskhash | 457 | key = fn + ':do_' + task + ':' + taskhash |
468 | 458 | ||
469 | # Sanity checks | 459 | # Sanity checks |
470 | cache_unihash = self.unitaskhashes.get(key, None) | 460 | cache_unihash = self.unitaskhashes.get(key, None) |
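
For handlers outside bitbake that subclass the generators above, the relevant
change is the method signatures: get_taskhash(), get_unihash() and set_unihash()
now take a single "<filename>:<taskname>" tid. A minimal sketch of how a custom
handler might adapt, assuming an environment where bb.siggen and bb.runqueue are
importable; the class name and its extra logic are hypothetical:

```python
import bb.runqueue
import bb.siggen

class MyLayerSigGen(bb.siggen.SignatureGeneratorBasicHash):
    """Hypothetical custom signature handler updated for the tid-based API."""
    name = "mylayer"

    # Previously this took (self, fn, task, deps, dataCache); callers now
    # pass a single "<filename>:<taskname>" tid instead.
    def get_taskhash(self, tid, deps, dataCache):
        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
        # Layer-specific adjustments would go here; this sketch just defers
        # to the base class, which now also keys its dicts by tid.
        return super().get_taskhash(tid, deps, dataCache)

    def get_unihash(self, tid):
        # self.taskhash/self.basehash are keyed by tid as well now.
        return super().get_unihash(tid)
```

Such a handler would still be selected via BB_SIGNATURE_HANDLER as before; only
the key format and the method signatures change.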
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 3cf5abec70..a9b0bdb099 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -466,7 +466,7 @@ class TaskHash(unittest.TestCase):
466 | tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d) | 466 | tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d) |
467 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, set(), "somefile") | 467 | taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, set(), "somefile") |
468 | bb.warn(str(lookupcache)) | 468 | bb.warn(str(lookupcache)) |
469 | return basehash["somefile." + taskname] | 469 | return basehash["somefile:" + taskname] |
470 | 470 | ||
471 | d = bb.data.init() | 471 | d = bb.data.init() |
472 | d.setVar("__BBTASKS", ["mytask"]) | 472 | d.setVar("__BBTASKS", ["mytask"]) |