 bitbake/lib/bb/runqueue.py |  8
 bitbake/lib/bb/siggen.py   | 78
 2 files changed, 67 insertions(+), 19 deletions(-)
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 5b7dab8d79..adb34a8cf2 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1190,8 +1190,9 @@ class RunQueueData:
         return len(self.runtaskentries)
 
     def prepare_task_hash(self, tid):
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches[mc_from_tid(tid)])
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches[mc_from_tid(tid)])
+        dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
+        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
+        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
         self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
 
     def dump_data(self):
@@ -2305,7 +2306,8 @@ class RunQueueExecute:
                 if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
                     continue
                 orighash = self.rqdata.runtaskentries[tid].hash
-                newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches[mc_from_tid(tid)])
+                dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid))
+                newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
                 origuni = self.rqdata.runtaskentries[tid].unihash
                 newuni = bb.parse.siggen.get_unihash(tid)
                 # FIXME, need to check it can come from sstate at all for determinism?
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 4c8d81c5da..872333d7fd 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -38,6 +38,11 @@ class SignatureGenerator(object):
     """
     name = "noop"
 
+    # If the derived class supports multiconfig datacaches, set this to True
+    # The default is False for backward compatibility with derived signature
+    # generators that do not understand multiconfig caches
+    supports_multiconfig_datacaches = False
+
     def __init__(self, data):
         self.basehash = {}
         self.taskhash = {}
@@ -58,10 +63,10 @@ class SignatureGenerator(object):
     def get_unihash(self, tid):
         return self.taskhash[tid]
 
-    def prep_taskhash(self, tid, deps, dataCache):
+    def prep_taskhash(self, tid, deps, dataCaches):
         return
 
-    def get_taskhash(self, tid, deps, dataCache):
+    def get_taskhash(self, tid, deps, dataCaches):
         self.taskhash[tid] = hashlib.sha256(tid.encode("utf-8")).hexdigest()
         return self.taskhash[tid]
 
@@ -105,6 +110,38 @@ class SignatureGenerator(object):
     def set_setscene_tasks(self, setscene_tasks):
         return
 
+    @classmethod
+    def get_data_caches(cls, dataCaches, mc):
+        """
+        This function returns the datacaches that should be passed to signature
+        generator functions. If the signature generator supports multiconfig
+        caches, the entire dictionary of data caches is sent, otherwise a
+        special proxy is sent that supports both index access to all
+        multiconfigs, and also direct access for the default multiconfig.
+
+        The proxy class allows code in this class itself to always use
+        multiconfig aware code (to ease maintenance), but derived classes that
+        are unaware of multiconfig data caches can still access the default
+        multiconfig as expected.
+
+        Do not override this function in derived classes; it will be removed in
+        the future when support for multiconfig data caches is mandatory
+        """
+        class DataCacheProxy(object):
+            def __init__(self):
+                pass
+
+            def __getitem__(self, key):
+                return dataCaches[key]
+
+            def __getattr__(self, name):
+                return getattr(dataCaches[mc], name)
+
+        if cls.supports_multiconfig_datacaches:
+            return dataCaches
+
+        return DataCacheProxy()
+
 class SignatureGeneratorBasic(SignatureGenerator):
     """
     """
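The docstring above is easiest to follow with a tiny standalone example. This is a sketch only, with made-up stand-ins for BitBake's per-multiconfig cache objects: indexing the proxy reaches the cache of any multiconfig, while plain attribute access falls through to the cache of the task's own multiconfig, so pre-existing single-config code keeps working unchanged.

    # Standalone sketch of the DataCacheProxy idea; SimpleNamespace objects
    # stand in for BitBake's real data cache objects (hypothetical data).
    from types import SimpleNamespace

    dataCaches = {
        "": SimpleNamespace(pkg_fn={"foo.bb": "foo"}),      # default multiconfig
        "mc1": SimpleNamespace(pkg_fn={"bar.bb": "bar"}),   # multiconfig "mc1"
    }
    mc = ""  # multiconfig of the task currently being hashed

    class DataCacheProxy(object):
        def __getitem__(self, key):
            return dataCaches[key]                # index access: any multiconfig
        def __getattr__(self, name):
            return getattr(dataCaches[mc], name)  # attribute access: the task's multiconfig

    dc = DataCacheProxy()
    print(dc.pkg_fn["foo.bb"])         # "foo"  - behaves like a single data cache
    print(dc["mc1"].pkg_fn["bar.bb"])  # "bar"  - but other multiconfigs stay reachable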
@@ -200,7 +237,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
         self.lookupcache = {}
         self.taskdeps = {}
 
-    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
+    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
         # Return True if we should keep the dependency, False to drop it
         # We only manipulate the dependencies for packages not in the whitelist
         if self.twl and not self.twl.search(recipename):
@@ -218,37 +255,40 @@ class SignatureGeneratorBasic(SignatureGenerator):
             pass
         return taint
 
-    def prep_taskhash(self, tid, deps, dataCache):
+    def prep_taskhash(self, tid, deps, dataCaches):
 
         (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
 
-        self.basehash[tid] = dataCache.basetaskhash[tid]
+        self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
         self.runtaskdeps[tid] = []
         self.file_checksum_values[tid] = []
-        recipename = dataCache.pkg_fn[fn]
+        recipename = dataCaches[mc].pkg_fn[fn]
 
         self.tidtopn[tid] = recipename
 
         for dep in sorted(deps, key=clean_basepath):
-            (depmc, _, deptaskname, depfn) = bb.runqueue.split_tid_mcfn(dep)
-            if mc != depmc:
+            (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
+            depname = dataCaches[depmc].pkg_fn[depmcfn]
+            if not self.supports_multiconfig_datacaches and mc != depmc:
+                # If the signature generator doesn't understand multiconfig
+                # data caches, any dependency not in the same multiconfig must
+                # be skipped for backward compatibility
                 continue
-            depname = dataCache.pkg_fn[depfn]
-            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
+            if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
                 continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            self.runtaskdeps[tid].append(dep)
 
-        if task in dataCache.file_checksums[fn]:
+        if task in dataCaches[mc].file_checksums[fn]:
             if self.checksum_cache:
-                checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
+                checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
             else:
-                checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename, self.localdirsexclude)
+                checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
             for (f,cs) in checksums:
                 self.file_checksum_values[tid].append((f,cs))
 
-        taskdep = dataCache.task_deps[fn]
+        taskdep = dataCaches[mc].task_deps[fn]
         if 'nostamp' in taskdep and task in taskdep['nostamp']:
             # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
             if tid in self.taints and self.taints[tid].startswith("nostamp:"):
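The comment added inside the dependency loop above is the backward-compatibility core of the change: for a generator that does not declare supports_multiconfig_datacaches, a dependency living in another multiconfig is simply dropped from the task's hash inputs, exactly as before this patch. A rough sketch of that filter in isolation (made-up tids, not the real prep_taskhash):

    # Hypothetical illustration of the cross-multiconfig dependency filter.
    def filter_deps(deps_with_mc, mc, supports_multiconfig_datacaches):
        kept = []
        for dep, depmc in deps_with_mc:
            if not supports_multiconfig_datacaches and mc != depmc:
                # Old-style generators never see other multiconfigs' caches,
                # so their cross-multiconfig dependencies are skipped.
                continue
            kept.append(dep)
        return kept

    deps = [("a.bb:do_compile", ""), ("mc:other:b.bb:do_compile", "other")]
    print(filter_deps(deps, "", False))  # ['a.bb:do_compile']
    print(filter_deps(deps, "", True))   # both tids contribute to the hash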
@@ -259,14 +299,14 @@ class SignatureGeneratorBasic(SignatureGenerator):
                 taint = str(uuid.uuid4())
                 self.taints[tid] = "nostamp:" + taint
 
-        taint = self.read_taint(fn, task, dataCache.stamp[fn])
+        taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
         if taint:
             self.taints[tid] = taint
             logger.warning("%s is tainted from a forced run" % tid)
 
         return
 
-    def get_taskhash(self, tid, deps, dataCache):
+    def get_taskhash(self, tid, deps, dataCaches):
 
         data = self.basehash[tid]
         for dep in self.runtaskdeps[tid]:
@@ -640,6 +680,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureGeneratorBasicHash):
         self.server = data.getVar('BB_HASHSERVE')
         self.method = "sstate_output_hash"
 
+#
+# Dummy class used for bitbake-selftest
+#
+class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
+    name = "TestMulticonfigDepends"
+    supports_multiconfig_datacaches = True
 
 def dump_this_task(outfile, d):
     import bb.parse
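Taken together, a third-party signature generator opts into the new behaviour by setting a single class attribute, after which prep_taskhash() and get_taskhash() receive the full dictionary of data caches keyed by multiconfig name and can index it per dependency. The subclass below is a hedged sketch of that pattern, not the upstream implementation; the per-dependency bookkeeping is purely illustrative.

    # Illustrative subclass (not part of this patch): opting into multiconfig
    # data caches and indexing them by each dependency's multiconfig.
    import bb.runqueue
    import bb.siggen

    class SignatureGeneratorMyMulticonfig(bb.siggen.SignatureGeneratorBasicHash):
        name = "mymulticonfig"                  # selected via BB_SIGNATURE_HANDLER
        supports_multiconfig_datacaches = True  # receive the {mc: cache} dict, not the proxy

        def get_taskhash(self, tid, deps, dataCaches):
            for dep in deps:
                # Look each dependency up in the cache of the multiconfig it
                # actually belongs to, instead of assuming the task's own one.
                (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
                depname = dataCaches[depmc].pkg_fn[depmcfn]
                # ... feed depname/dep into whatever extra policy this
                # generator implements ...
            return super().get_taskhash(tid, deps, dataCaches)

The SignatureGeneratorTestMulticonfigDepends class added at the end of the diff is the minimal real version of this: it changes nothing except the flag, which is enough for bitbake-selftest to exercise the multiconfig code paths.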
