Diffstat (limited to 'meta/lib/oe/sstatesig.py')
-rw-r--r--  meta/lib/oe/sstatesig.py | 66
1 file changed, 58 insertions(+), 8 deletions(-)

diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index a46e5502ab..ef687f5d41 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
+import bb.parse
 import bb.siggen
 import bb.runqueue
 import oe
@@ -93,6 +94,14 @@ def sstate_lockedsigs(d):
             sigs[pn][task] = [h, siggen_lockedsigs_var]
     return sigs
 
+def lockedsigs_unihashmap(d):
+    unihashmap = {}
+    data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
+    for entry in data:
+        pn, task, taskhash, unihash = entry.split(":")
+        unihashmap[(pn, task)] = (taskhash, unihash)
+    return unihashmap
+
 class SignatureGeneratorOEBasicHashMixIn(object):
     supports_multiconfig_datacaches = True
 
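The new lockedsigs_unihashmap() helper expects each SIGGEN_UNIHASHMAP entry to be a colon-separated pn:task:taskhash:unihash string. A minimal sketch of that parsing, using made-up recipe names and shortened hashes rather than real locked-signature data:

# Illustrative only: hypothetical entries standing in for a generated locked-sigs include file.
entries = "curl:do_fetch:aaaa1111:bbbb2222 zlib:do_compile:cccc3333:dddd4444"

unihashmap = {}
for entry in entries.split():
    pn, task, taskhash, unihash = entry.split(":")
    unihashmap[(pn, task)] = (taskhash, unihash)

print(unihashmap[("curl", "do_fetch")])   # ('aaaa1111', 'bbbb2222')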
@@ -100,6 +109,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
         self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
         self.lockedsigs = sstate_lockedsigs(data)
+        self.unihashmap = lockedsigs_unihashmap(data)
         self.lockedhashes = {}
         self.lockedpnmap = {}
         self.lockedhashfn = {}
@@ -209,6 +219,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
     def get_cached_unihash(self, tid):
         if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
             return self.lockedhashes[tid]
+
+        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+        recipename = self.lockedpnmap[fn]
+
+        if (recipename, task) in self.unihashmap:
+            taskhash, unihash = self.unihashmap[(recipename, task)]
+            if taskhash == self.taskhash[tid]:
+                return unihash
+
         return super().get_cached_unihash(tid)
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
@@ -219,6 +238,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
 
     def dump_lockedsigs(self, sigfile, taskfilter=None):
         types = {}
+        unihashmap = {}
        for tid in self.runtaskdeps:
             # Bitbake changed this to a tuple in newer versions
             if isinstance(tid, tuple):
@@ -226,13 +246,18 @@ class SignatureGeneratorOEBasicHashMixIn(object):
             if taskfilter:
                 if not tid in taskfilter:
                     continue
-            fn = bb.runqueue.fn_from_tid(tid)
+            (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
             t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
             t = 't-' + t.replace('_', '-')
             if t not in types:
                 types[t] = []
             types[t].append(tid)
 
+            taskhash = self.taskhash[tid]
+            unihash = self.get_unihash(tid)
+            if taskhash != unihash:
+                unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
+
         with open(sigfile, "w") as f:
             l = sorted(types)
             for t in l:
@@ -245,7 +270,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                         continue
                     f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                 f.write(' "\n')
-            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
+            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
+            f.write('SIGGEN_UNIHASHMAP += "\\\n')
+            sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+            for tid in sortedtid:
+                f.write(unihashmap[tid] + " \\\n")
+            f.write(' "\n')
 
     def dump_siglist(self, sigfile, path_prefix_strip=None):
         def strip_fn(fn):
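Taken together, dump_lockedsigs() records a SIGGEN_UNIHASHMAP entry only when a task's unihash diverges from its taskhash, and get_cached_unihash() reuses that entry only while the current taskhash still matches the recorded one. A small sketch of that round-trip guard, with hypothetical names and hashes rather than the class implementation:

# Hypothetical map as it would be parsed back by lockedsigs_unihashmap().
unihashmap = {("curl", "do_compile"): ("taskhash-old", "unihash-equiv")}

def cached_unihash(recipename, task, current_taskhash):
    # Reuse the recorded unihash only if the task hash is unchanged since dump time.
    if (recipename, task) in unihashmap:
        taskhash, unihash = unihashmap[(recipename, task)]
        if taskhash == current_taskhash:
            return unihash
    return None   # fall back to the normal unihash lookup

print(cached_unihash("curl", "do_compile", "taskhash-old"))   # unihash-equiv
print(cached_unihash("curl", "do_compile", "taskhash-new"))   # None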
@@ -327,7 +357,6 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
         self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
         if not self.method:
             bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
-        self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1)
         self.username = data.getVar("BB_HASHSERVE_USERNAME")
         self.password = data.getVar("BB_HASHSERVE_PASSWORD")
         if not self.username or not self.password:
@@ -371,7 +400,13 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         return siginfo.rpartition('.')[2]
 
     def get_time(fullpath):
-        return os.stat(fullpath).st_mtime
+        # NFS can end up in a weird state where the file exists but has no stat info.
+        # If that happens, we assume it doesn't actually exist and show a warning
+        try:
+            return os.stat(fullpath).st_mtime
+        except FileNotFoundError:
+            bb.warn("Could not obtain mtime for {}".format(fullpath))
+            return None
 
     # First search in stamps dir
     localdata = d.createCopy()
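The get_time() change makes find_siginfo() tolerate files that disappear (or lose their stat information on NFS) between globbing and stat(); the callers further down now skip any entry whose mtime could not be read. A self-contained sketch of the same defensive pattern, using a placeholder glob rather than the real stamps directory:

import glob
import os

def mtime_or_none(path):
    # The file may vanish between the glob and the stat; treat that as "not there".
    try:
        return os.stat(path).st_mtime
    except FileNotFoundError:
        return None

found = {}
for path in glob.glob("/tmp/stamps/*.sigdata.*"):   # placeholder path
    mtime = mtime_or_none(path)
    if mtime:   # entries we could not stat are simply dropped
        found[path] = mtime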
@@ -384,6 +419,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
     if pn.startswith("gcc-source"):
         # gcc-source shared workdir is a special case :(
         stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
+    elif pn.startswith("llvm-project-source"):
+        # llvm-project-source shared workdir is also a special case :*(
+        stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}")
 
     filespec = '%s.%s.sigdata.*' % (stamp, taskname)
     foundall = False
@@ -394,13 +432,17 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         if taskhashlist:
             for taskhash in taskhashlist:
                 if fullpath.endswith('.%s' % taskhash):
-                    hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
+                    mtime = get_time(fullpath)
+                    if mtime:
+                        hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
                     if len(hashfiles) == len(taskhashlist):
                         foundall = True
                         break
         else:
             hashval = get_hashval(fullpath)
-            hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
+            mtime = get_time(fullpath)
+            if mtime:
+                hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
 
     if not taskhashlist or (len(hashfiles) < 2 and not foundall):
         # That didn't work, look in sstate-cache
@@ -431,7 +473,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
                 actual_hashval = get_hashval(fullpath)
                 if actual_hashval in hashfiles:
                     continue
-                hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)}
+                mtime = get_time(fullpath)
+                if mtime:
+                    hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
 
     return hashfiles
 
@@ -450,6 +494,7 @@ def sstate_get_manifest_filename(task, d):
     d2.setVar("SSTATE_MANMACH", extrainf)
     return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
 
+@bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS")
 def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
     d2 = d
     variant = ''
@@ -524,6 +569,7 @@ def OEOuthashBasic(path, sigfile, task, d):
     if task == "package":
         include_timestamps = True
         include_root = False
+    source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
     hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
     extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
@@ -615,7 +661,11 @@ def OEOuthashBasic(path, sigfile, task, d):
                 raise Exception(msg).with_traceback(e.__traceback__)
 
             if include_timestamps:
-                update_hash(" %10d" % s.st_mtime)
+                # Need to clamp to SOURCE_DATE_EPOCH
+                if s.st_mtime > source_date_epoch:
+                    update_hash(" %10d" % source_date_epoch)
+                else:
+                    update_hash(" %10d" % s.st_mtime)
 
             update_hash(" ")
             if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
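The effect of the new branch is that any on-disk mtime newer than SOURCE_DATE_EPOCH is hashed as the epoch itself, so timestamps written "after" the reproducible-build epoch no longer perturb the output hash. A tiny sketch of that clamping with made-up values:

# Hypothetical SOURCE_DATE_EPOCH and file mtimes; the hashed value is min(mtime, epoch).
source_date_epoch = 1700000000.0
for st_mtime in (1600000000, 1800000000):
    clamped = min(st_mtime, source_date_epoch)
    print(" %10d" % clamped)   # 1600000000, then 1700000000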