Diffstat (limited to 'meta/lib/oe/sstatesig.py')
-rw-r--r--  meta/lib/oe/sstatesig.py | 289
1 file changed, 210 insertions, 79 deletions

diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 6cd6e11acc..ef687f5d41 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,13 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
+import bb.parse
 import bb.siggen
 import bb.runqueue
 import oe
+import netrc
 
 def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
     # Return True if we should keep the dependency, False to drop it
@@ -28,6 +32,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
     depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
     mc, _ = bb.runqueue.split_mc(fn)
 
+    # We can skip the rm_work task signature to avoid running the task
+    # when we remove some tasks from the dependency chain,
+    # e.g. INHERIT:remove = "create-spdx" will trigger do_rm_work
+    if task == "do_rm_work":
+        return False
+
     # (Almost) always include our own inter-task dependencies (unless it comes
     # from a mcdepends). The exception is the special
     # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
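Note: the early return added above drops any dependency edge targeting do_rm_work before it can influence a signature. A standalone sketch of the effect (a simplified stand-in, not the real siggen API):

    def keep_dependency(task):
        # Mirrors the early return in sstate_rundepfilter(): signatures never
        # depend on do_rm_work, so pruning tasks from the dependency chain
        # (e.g. INHERIT:remove = "create-spdx") cannot invalidate them.
        return task != "do_rm_work"

    assert not keep_dependency("do_rm_work")
    assert keep_dependency("do_compile")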
@@ -59,7 +69,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
         return False
 
     # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
-    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
+    # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
     # is machine specific.
     # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
     # and we recommend a kernel-module, we exclude the dependency.
@@ -84,14 +94,13 @@ def sstate_lockedsigs(d):
             sigs[pn][task] = [h, siggen_lockedsigs_var]
     return sigs
 
-class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
-    name = "OEBasic"
-    def init_rundepcheck(self, data):
-        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
-        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
-        pass
-    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None):
-        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
+def lockedsigs_unihashmap(d):
+    unihashmap = {}
+    data = (d.getVar("SIGGEN_UNIHASHMAP") or "").split()
+    for entry in data:
+        pn, task, taskhash, unihash = entry.split(":")
+        unihashmap[(pn, task)] = (taskhash, unihash)
+    return unihashmap
 
 class SignatureGeneratorOEBasicHashMixIn(object):
     supports_multiconfig_datacaches = True
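Note: lockedsigs_unihashmap() expects each SIGGEN_UNIHASHMAP entry to be a colon-separated pn:task:taskhash:unihash tuple. A minimal parsing sketch (hash values invented):

    value = "zlib:do_configure:1111aaaa:2222bbbb m4:do_compile:3333cccc:4444dddd"

    unihashmap = {}
    for entry in value.split():
        pn, task, taskhash, unihash = entry.split(":")
        unihashmap[(pn, task)] = (taskhash, unihash)

    # {('zlib', 'do_configure'): ('1111aaaa', '2222bbbb'), ('m4', 'do_compile'): ('3333cccc', '4444dddd')}
    print(unihashmap)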
@@ -100,15 +109,17 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
         self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
         self.lockedsigs = sstate_lockedsigs(data)
+        self.unihashmap = lockedsigs_unihashmap(data)
         self.lockedhashes = {}
         self.lockedpnmap = {}
         self.lockedhashfn = {}
         self.machine = data.getVar("MACHINE")
         self.mismatch_msgs = []
+        self.mismatch_number = 0
+        self.lockedsigs_msgs = ""
         self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                 "").split()
         self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
-        self.buildarch = data.getVar('BUILD_ARCH')
         self._internal = False
         pass
 
@@ -142,18 +153,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         super().set_taskdata(data[3:])
 
     def dump_sigs(self, dataCache, options):
-        sigfile = os.getcwd() + "/locked-sigs.inc"
-        bb.plain("Writing locked sigs to %s" % sigfile)
-        self.dump_lockedsigs(sigfile)
+        if 'lockedsigs' in options:
+            sigfile = os.getcwd() + "/locked-sigs.inc"
+            bb.plain("Writing locked sigs to %s" % sigfile)
+            self.dump_lockedsigs(sigfile)
         return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
 
-    def prep_taskhash(self, tid, deps, dataCaches):
-        super().prep_taskhash(tid, deps, dataCaches)
-        if hasattr(self, "extramethod"):
-            (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
-            inherits = " ".join(dataCaches[mc].inherits[fn])
-            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
-                self.extramethod[tid] = ":" + self.buildarch
 
     def get_taskhash(self, tid, deps, dataCaches):
         if tid in self.lockedhashes:
@@ -196,6 +201,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                 #bb.warn("Using %s %s %s" % (recipename, task, h))
 
                 if h != h_locked and h_locked != unihash:
+                    self.mismatch_number += 1
                     self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                               % (recipename, task, h, h_locked, var))
 
@@ -210,10 +216,19 @@ class SignatureGeneratorOEBasicHashMixIn(object):
             return self.lockedhashes[tid]
         return super().get_stampfile_hash(tid)
 
-    def get_unihash(self, tid):
+    def get_cached_unihash(self, tid):
         if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
             return self.lockedhashes[tid]
-        return super().get_unihash(tid)
+
+        (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+        recipename = self.lockedpnmap[fn]
+
+        if (recipename, task) in self.unihashmap:
+            taskhash, unihash = self.unihashmap[(recipename, task)]
+            if taskhash == self.taskhash[tid]:
+                return unihash
+
+        return super().get_cached_unihash(tid)
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
         tid = fn + ":" + task
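Note: the lookup added to get_cached_unihash() only trusts a recorded unihash while the current taskhash still matches the one captured at lock time; otherwise it falls through to the normal cache. A toy illustration (hashes invented):

    unihashmap = {("zlib", "do_configure"): ("1111aaaa", "2222bbbb")}

    def cached_unihash(pn, task, current_taskhash):
        if (pn, task) in unihashmap:
            taskhash, unihash = unihashmap[(pn, task)]
            if taskhash == current_taskhash:
                return unihash
        return None  # fall back to the regular unihash cache

    assert cached_unihash("zlib", "do_configure", "1111aaaa") == "2222bbbb"
    assert cached_unihash("zlib", "do_configure", "deadbeef") is None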
@@ -223,17 +238,26 @@ class SignatureGeneratorOEBasicHashMixIn(object):
 
     def dump_lockedsigs(self, sigfile, taskfilter=None):
         types = {}
+        unihashmap = {}
         for tid in self.runtaskdeps:
+            # Bitbake changed this to a tuple in newer versions
+            if isinstance(tid, tuple):
+                tid = tid[1]
             if taskfilter:
                 if not tid in taskfilter:
                     continue
-            fn = bb.runqueue.fn_from_tid(tid)
+            (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
             t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
             t = 't-' + t.replace('_', '-')
             if t not in types:
                 types[t] = []
             types[t].append(tid)
 
+            taskhash = self.taskhash[tid]
+            unihash = self.get_unihash(tid)
+            if taskhash != unihash:
+                unihashmap[tid] = " " + self.lockedpnmap[fn] + ":" + task + ":" + taskhash + ":" + unihash
+
         with open(sigfile, "w") as f:
             l = sorted(types)
             for t in l:
@@ -246,15 +270,31 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                     continue
                 f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
             f.write(' "\n')
-        f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
+        f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"\n' % (self.machine, " ".join(l)))
+        f.write('SIGGEN_UNIHASHMAP += "\\\n')
+        sortedtid = sorted(unihashmap, key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+        for tid in sortedtid:
+            f.write(unihashmap[tid] + " \\\n")
+        f.write(' "\n')
+
+    def dump_siglist(self, sigfile, path_prefix_strip=None):
+        def strip_fn(fn):
+            nonlocal path_prefix_strip
+            if not path_prefix_strip:
+                return fn
+
+            fn_exp = fn.split(":")
+            if fn_exp[-1].startswith(path_prefix_strip):
+                fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
+
+            return ":".join(fn_exp)
 
-    def dump_siglist(self, sigfile):
         with open(sigfile, "w") as f:
             tasks = []
             for taskitem in self.taskhash:
                 (fn, task) = taskitem.rsplit(":", 1)
                 pn = self.lockedpnmap[fn]
-                tasks.append((pn, task, fn, self.taskhash[taskitem]))
+                tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
             for (pn, task, fn, taskhash) in sorted(tasks):
                 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
 
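Note: dump_lockedsigs() now emits one SIGGEN_LOCKEDSIGS_<type> block per package-arch type plus a SIGGEN_UNIHASHMAP block for tasks whose taskhash and unihash diverge. A sketch that prints the resulting locked-sigs.inc shape (hashes shortened and invented):

    lines = [
        'SIGGEN_LOCKEDSIGS_t-core2-64 = "\\',
        '    zlib:do_configure:2222bbbb \\',
        '    "',
        'SIGGEN_LOCKEDSIGS_TYPES:qemux86-64 = "t-core2-64"',
        'SIGGEN_UNIHASHMAP += "\\',
        '    zlib:do_configure:1111aaaa:2222bbbb \\',
        '    "',
    ]
    print("\n".join(lines))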
@@ -262,6 +302,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         warn_msgs = []
         error_msgs = []
         sstate_missing_msgs = []
+        info_msgs = None
+
+        if self.lockedsigs:
+            if len(self.lockedsigs) > 10:
+                self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number)
+            else:
+                self.lockedsigs_msgs = "The following recipes have locked tasks:"
+                for pn in self.lockedsigs:
+                    self.lockedsigs_msgs += " %s" % (pn)
 
         for tid in sq_data['hash']:
             if tid not in found:
@@ -274,7 +323,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                                          % (pn, taskname, sq_data['hash'][tid]))
 
         checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
-        if checklevel == 'warn':
+        if checklevel == 'info':
+            info_msgs = self.lockedsigs_msgs
+        if checklevel == 'warn' or checklevel == 'info':
             warn_msgs += self.mismatch_msgs
         elif checklevel == 'error':
             error_msgs += self.mismatch_msgs
@@ -285,6 +336,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         elif checklevel == 'error':
             error_msgs += sstate_missing_msgs
 
+        if info_msgs:
+            bb.note(info_msgs)
         if warn_msgs:
             bb.warn("\n".join(warn_msgs))
         if error_msgs:
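Note: SIGGEN_LOCKEDSIGS_TASKSIG_CHECK now accepts 'info' alongside 'warn' and 'error'; 'info' prints the locked-task summary via bb.note() and still reports signature mismatches as warnings. A standalone sketch of the dispatch (print() standing in for bb.note()/bb.warn()):

    def report(checklevel, summary, mismatches):
        warn_msgs = []
        if checklevel == 'info':
            print("NOTE:", summary)
        if checklevel == 'warn' or checklevel == 'info':
            warn_msgs += mismatches
        elif checklevel == 'error':
            raise RuntimeError("\n".join(mismatches))
        if warn_msgs:
            print("WARNING:", "\n".join(warn_msgs))

    report('info', "The following recipes have locked tasks: zlib", [])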
@@ -304,9 +357,20 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
         self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
         if not self.method:
             bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
+        self.username = data.getVar("BB_HASHSERVE_USERNAME")
+        self.password = data.getVar("BB_HASHSERVE_PASSWORD")
+        if not self.username or not self.password:
+            try:
+                n = netrc.netrc()
+                auth = n.authenticators(self.server)
+                if auth is not None:
+                    self.username, _, self.password = auth
+            except FileNotFoundError:
+                pass
+            except netrc.NetrcParseError as e:
+                bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
 
 # Insert these classes into siggen's namespace so it can see and select them
-bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
 bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
 bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
 
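Note: hash equivalence server credentials now fall back to ~/.netrc when BB_HASHSERVE_USERNAME/BB_HASHSERVE_PASSWORD are unset. netrc.authenticators() returns a (login, account, password) tuple or None; a standalone sketch (the host name is hypothetical):

    import netrc

    username = password = None
    try:
        auth = netrc.netrc().authenticators("hashserv.example.com")
        if auth is not None:
            username, _, password = auth
    except FileNotFoundError:
        pass  # no ~/.netrc at all; continue unauthenticated
    except netrc.NetrcParseError as e:
        print("Error parsing %s:%s: %s" % (e.filename, e.lineno, e.msg))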
@@ -320,14 +384,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
     if not taskname:
         # We have to derive pn and taskname
         key = pn
-        splitit = key.split('.bb:')
-        taskname = splitit[1]
-        pn = os.path.basename(splitit[0]).split('_')[0]
-        if key.startswith('virtual:native:'):
-            pn = pn + '-native'
+        if key.startswith("mc:"):
+            # mc:<mc>:<pn>:<task>
+            _, _, pn, taskname = key.split(':', 3)
+        else:
+            # <pn>:<task>
+            pn, taskname = key.split(':', 1)
 
     hashfiles = {}
-    filedates = {}
 
     def get_hashval(siginfo):
         if siginfo.endswith('.siginfo'):
@@ -335,6 +399,15 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         else:
             return siginfo.rpartition('.')[2]
 
+    def get_time(fullpath):
+        # NFS can end up in a weird state where the file exists but has no stat info.
+        # If that happens, we assume it doesn't actually exist and show a warning
+        try:
+            return os.stat(fullpath).st_mtime
+        except FileNotFoundError:
+            bb.warn("Could not obtain mtime for {}".format(fullpath))
+            return None
+
     # First search in stamps dir
     localdata = d.createCopy()
     localdata.setVar('MULTIMACH_TARGET_SYS', '*')
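Note: get_time() deliberately catches only FileNotFoundError: on NFS a file can still appear in a directory listing while stat() fails, and such entries are treated as absent rather than fatal. The caller-side effect, sketched standalone:

    import os

    def get_mtime(fullpath):
        # Same shape as get_time() above: a failed stat means "skip this file".
        try:
            return os.stat(fullpath).st_mtime
        except FileNotFoundError:
            return None

    candidates = ["/tmp/a.siginfo", "/tmp/vanished.siginfo"]
    found = {p: get_mtime(p) for p in candidates}
    found = {p: t for p, t in found.items() if t is not None}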
@@ -346,28 +419,32 @@ def find_siginfo(pn, taskname, taskhashlist, d):
     if pn.startswith("gcc-source"):
         # gcc-source shared workdir is a special case :(
         stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}")
+    elif pn.startswith("llvm-project-source"):
+        # llvm-project-source shared workdir is also a special case :*(
+        stamp = localdata.expand("${STAMPS_DIR}/work-shared/llvm-project-source-${PV}-${PR}")
 
     filespec = '%s.%s.sigdata.*' % (stamp, taskname)
     foundall = False
     import glob
+    bb.debug(1, "Calling glob.glob on {}".format(filespec))
     for fullpath in glob.glob(filespec):
         match = False
         if taskhashlist:
             for taskhash in taskhashlist:
                 if fullpath.endswith('.%s' % taskhash):
-                    hashfiles[taskhash] = fullpath
+                    mtime = get_time(fullpath)
+                    if mtime:
+                        hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':mtime}
                     if len(hashfiles) == len(taskhashlist):
                         foundall = True
                         break
         else:
-            try:
-                filedates[fullpath] = os.stat(fullpath).st_mtime
-            except OSError:
-                continue
             hashval = get_hashval(fullpath)
-            hashfiles[hashval] = fullpath
+            mtime = get_time(fullpath)
+            if mtime:
+                hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':mtime}
 
-    if not taskhashlist or (len(filedates) < 2 and not foundall):
+    if not taskhashlist or (len(hashfiles) < 2 and not foundall):
         # That didn't work, look in sstate-cache
         hashes = taskhashlist or ['?' * 64]
         localdata = bb.data.createCopy(d)
@@ -376,35 +453,34 @@ def find_siginfo(pn, taskname, taskhashlist, d):
             localdata.setVar('TARGET_VENDOR', '*')
             localdata.setVar('TARGET_OS', '*')
             localdata.setVar('PN', pn)
+            # gcc-source is a special case, same as with local stamps above
+            if pn.startswith("gcc-source"):
+                localdata.setVar('PN', "gcc")
             localdata.setVar('PV', '*')
             localdata.setVar('PR', '*')
             localdata.setVar('BB_TASKHASH', hashval)
+            localdata.setVar('SSTATE_CURRTASK', taskname[3:])
             swspec = localdata.getVar('SSTATE_SWSPEC')
             if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
             elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
-            sstatename = taskname[3:]
-            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
+            filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
 
+            bb.debug(1, "Calling glob.glob on {}".format(filespec))
             matchedfiles = glob.glob(filespec)
             for fullpath in matchedfiles:
                 actual_hashval = get_hashval(fullpath)
                 if actual_hashval in hashfiles:
                     continue
-                hashfiles[hashval] = fullpath
-                if not taskhashlist:
-                    try:
-                        filedates[fullpath] = os.stat(fullpath).st_mtime
-                    except:
-                        continue
+                mtime = get_time(fullpath)
+                if mtime:
+                    hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':mtime}
 
-    if taskhashlist:
-        return hashfiles
-    else:
-        return filedates
+    return hashfiles
 
 bb.siggen.find_siginfo = find_siginfo
+bb.siggen.find_siginfo_version = 2
 
 
 def sstate_get_manifest_filename(task, d):
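Note: find_siginfo() now returns one dict of hash -> {'path', 'sstate', 'time'} instead of two differently shaped results, and advertises the new contract through find_siginfo_version = 2 so callers such as bitbake-diffsigs can select the matching code path. A hypothetical consumer sketch (values invented):

    matches = {
        "1111aaaa": {'path': "stamps/zlib.do_configure.sigdata.1111aaaa", 'sstate': False, 'time': 1700000000.0},
        "2222bbbb": {'path': "sstate-cache/zlib.do_configure.siginfo", 'sstate': True, 'time': 1700000100.0},
    }
    latest = max(matches.values(), key=lambda m: m['time'])
    print(latest['path'], "(from sstate)" if latest['sstate'] else "(from stamps)")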
@@ -418,6 +494,7 @@ def sstate_get_manifest_filename(task, d):
         d2.setVar("SSTATE_MANMACH", extrainf)
     return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
 
+@bb.parse.vardepsexclude("BBEXTENDCURR", "BBEXTENDVARIANT", "OVERRIDES", "PACKAGE_EXTRA_ARCHS")
 def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
     d2 = d
     variant = ''
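Note: the bb.parse.vardepsexclude decorator tells BitBake's signature machinery not to treat the named variables as dependencies of code that calls find_sstate_manifest(), since OVERRIDES and the multilib variables vary between variants without affecting the result. Illustrative use (requires a BitBake environment; the helper below is hypothetical):

    import bb.parse

    @bb.parse.vardepsexclude("OVERRIDES")
    def lookup(d):
        # d.getVar("OVERRIDES") is read here, but excluded from vardeps tracking.
        return d.getVar("OVERRIDES")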
@@ -440,7 +517,7 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
     elif "-cross-canadian" in taskdata:
         pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
     elif "-cross-" in taskdata:
-        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
+        pkgarchs = ["${BUILD_ARCH}"]
     elif "-crosssdk" in taskdata:
         pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
     else:
@@ -449,11 +526,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
         pkgarchs.append('allarch')
         pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
 
+    searched_manifests = []
+
     for pkgarch in pkgarchs:
         manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
         if os.path.exists(manifest):
             return manifest, d2
-    bb.error("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
+        searched_manifests.append(manifest)
+    bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n    %s"
+             % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)), "\n    ".join(searched_manifests)))
     return None, d2
 
 def OEOuthashBasic(path, sigfile, task, d):
459 | def OEOuthashBasic(path, sigfile, task, d): | 540 | def OEOuthashBasic(path, sigfile, task, d): |
@@ -467,6 +548,8 @@ def OEOuthashBasic(path, sigfile, task, d): | |||
467 | import stat | 548 | import stat |
468 | import pwd | 549 | import pwd |
469 | import grp | 550 | import grp |
551 | import re | ||
552 | import fnmatch | ||
470 | 553 | ||
471 | def update_hash(s): | 554 | def update_hash(s): |
472 | s = s.encode('utf-8') | 555 | s = s.encode('utf-8') |
@@ -476,20 +559,38 @@ def OEOuthashBasic(path, sigfile, task, d):
 
     h = hashlib.sha256()
     prev_dir = os.getcwd()
+    corebase = d.getVar("COREBASE")
+    tmpdir = d.getVar("TMPDIR")
     include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
     if "package_write_" in task or task == "package_qa":
         include_owners = False
     include_timestamps = False
+    include_root = True
     if task == "package":
-        include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
-    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
+        include_timestamps = True
+        include_root = False
+    source_date_epoch = float(d.getVar("SOURCE_DATE_EPOCH"))
+    hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
+    extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
+
+    filemaps = {}
+    for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
+        entry = m.split(":")
+        if len(entry) != 3 or entry[0] != task:
+            continue
+        filemaps.setdefault(entry[1], [])
+        filemaps[entry[1]].append(entry[2])
 
     try:
         os.chdir(path)
+        basepath = os.path.normpath(path)
 
         update_hash("OEOuthashBasic\n")
-        if extra_content:
-            update_hash(extra_content + "\n")
+        if hash_version:
+            update_hash(hash_version + "\n")
+
+        if extra_sigdata:
+            update_hash(extra_sigdata + "\n")
 
         # It is only currently useful to get equivalent hashes for things that
         # can be restored from sstate. Since the sstate object is named using
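Note: each SSTATE_HASHEQUIV_FILEMAP entry has the form <task>:<path glob>:<replacement>, where a replacement starting with "regex-" is a regular expression to strip from matching files before hashing rather than a literal string. A parsing sketch with an invented value:

    value = ("populate_sysroot:*/crossscripts/*:/srv/build/tmp "
             r"populate_sysroot:*/postinst-useradd-*:regex-PATH=\S*\s")

    task = "populate_sysroot"
    filemaps = {}
    for m in value.split():
        entry = m.split(":")
        if len(entry) != 3 or entry[0] != task:
            continue
        filemaps.setdefault(entry[1], []).append(entry[2])

    print(filemaps)  # glob -> list of literal or "regex-" replacements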
@@ -534,32 +635,37 @@ def OEOuthashBasic(path, sigfile, task, d):
                 else:
                     add_perm(stat.S_IXUSR, 'x')
 
-                add_perm(stat.S_IRGRP, 'r')
-                add_perm(stat.S_IWGRP, 'w')
-                if stat.S_ISGID & s.st_mode:
-                    add_perm(stat.S_IXGRP, 's', 'S')
-                else:
-                    add_perm(stat.S_IXGRP, 'x')
+                if include_owners:
+                    # Group/other permissions are only relevant in pseudo context
+                    add_perm(stat.S_IRGRP, 'r')
+                    add_perm(stat.S_IWGRP, 'w')
+                    if stat.S_ISGID & s.st_mode:
+                        add_perm(stat.S_IXGRP, 's', 'S')
+                    else:
+                        add_perm(stat.S_IXGRP, 'x')
 
                     add_perm(stat.S_IROTH, 'r')
                     add_perm(stat.S_IWOTH, 'w')
                     if stat.S_ISVTX & s.st_mode:
                         update_hash('t')
                     else:
                         add_perm(stat.S_IXOTH, 'x')
 
-                if include_owners:
                     try:
                         update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                         update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                     except KeyError as e:
-                        bb.warn("KeyError in %s" % path)
                         msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
-                               "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid))
+                               "any user/group on target. This may be due to host contamination." %
+                               (e, os.path.abspath(path), s.st_uid, s.st_gid))
                         raise Exception(msg).with_traceback(e.__traceback__)
 
                 if include_timestamps:
-                    update_hash(" %10d" % s.st_mtime)
+                    # Need to clamp to SOURCE_DATE_EPOCH
+                    if s.st_mtime > source_date_epoch:
+                        update_hash(" %10d" % source_date_epoch)
+                    else:
+                        update_hash(" %10d" % s.st_mtime)
 
                 update_hash(" ")
                 if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
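Note: for do_package output, timestamps now always feed the hash but are clamped to SOURCE_DATE_EPOCH, so files rewritten during a build still hash identically across runs. The clamp in isolation:

    source_date_epoch = 1700000000.0  # stand-in for float(d.getVar("SOURCE_DATE_EPOCH"))

    def hashed_mtime(st_mtime):
        # Anything newer than the epoch is hashed as the epoch itself.
        return min(st_mtime, source_date_epoch)

    assert hashed_mtime(1800000000.0) == source_date_epoch
    assert hashed_mtime(1600000000.0) == 1600000000.0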
@@ -567,8 +673,13 @@ def OEOuthashBasic(path, sigfile, task, d):
                 else:
                     update_hash(" " * 9)
 
+                filterfile = False
+                for entry in filemaps:
+                    if fnmatch.fnmatch(path, entry):
+                        filterfile = True
+
                 update_hash(" ")
-                if stat.S_ISREG(s.st_mode):
+                if stat.S_ISREG(s.st_mode) and not filterfile:
                     update_hash("%10d" % s.st_size)
                 else:
                     update_hash(" " * 10)
@@ -577,9 +688,24 @@ def OEOuthashBasic(path, sigfile, task, d):
                 fh = hashlib.sha256()
                 if stat.S_ISREG(s.st_mode):
                     # Hash file contents
-                    with open(path, 'rb') as d:
-                        for chunk in iter(lambda: d.read(4096), b""):
+                    if filterfile:
+                        # Need to ignore paths in crossscripts and postinst-useradd files.
+                        with open(path, 'rb') as d:
+                            chunk = d.read()
+                            chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
+                            for entry in filemaps:
+                                if not fnmatch.fnmatch(path, entry):
+                                    continue
+                                for r in filemaps[entry]:
+                                    if r.startswith("regex-"):
+                                        chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
+                                    else:
+                                        chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
                             fh.update(chunk)
+                    else:
+                        with open(path, 'rb') as d:
+                            for chunk in iter(lambda: d.read(4096), b""):
+                                fh.update(chunk)
                     update_hash(fh.hexdigest())
                 else:
                     update_hash(" " * len(fh.hexdigest()))
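Note: files matched by a filemap glob are read whole and scrubbed of host-specific strings (the output tree prefix plus each literal or "regex-" replacement) before hashing; their size is also skipped above because scrubbing changes it. A minimal scrub sketch:

    import re

    chunk = b"PATH=/srv/build/tmp/hosttools:/usr/bin\n"
    chunk = chunk.replace(b"/srv/build/tmp", b"")  # literal replacement entry
    chunk = re.sub(rb"PATH=\S*\s", b"", chunk)     # "regex-" replacement entry
    print(chunk)  # host path residue removed before fh.update(chunk)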
@@ -592,11 +718,16 @@ def OEOuthashBasic(path, sigfile, task, d):
             update_hash("\n")
 
             # Process this directory and all its child files
-            process(root)
+            if include_root or root != ".":
+                process(root)
             for f in files:
                 if f == 'fixmepath':
                     continue
                 process(os.path.join(root, f))
+
+            for dir in dirs:
+                if os.path.islink(os.path.join(root, dir)):
+                    process(os.path.join(root, dir))
     finally:
         os.chdir(prev_dir)
 