Diffstat (limited to 'meta/lib/oe/sstatesig.py'):
-rw-r--r--  meta/lib/oe/sstatesig.py  237
1 file changed, 159 insertions(+), 78 deletions(-)
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 6cd6e11acc..a46e5502ab 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -1,9 +1,12 @@
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 import bb.siggen
 import bb.runqueue
 import oe
+import netrc
 
 def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
     # Return True if we should keep the dependency, False to drop it
@@ -28,6 +31,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
     depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
     mc, _ = bb.runqueue.split_mc(fn)
 
+    # We can skip the rm_work task signature to avoid running the task
+    # when we remove some tasks from the dependencie chain
+    # i.e INHERIT:remove = "create-spdx" will trigger the do_rm_work
+    if task == "do_rm_work":
+        return False
+
     # (Almost) always include our own inter-task dependencies (unless it comes
     # from a mcdepends). The exception is the special
     # do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -59,7 +68,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
         return False
 
     # Kernel modules are well namespaced. We don't want to depend on the kernel's checksum
-    # if we're just doing an RRECOMMENDS_xxx = "kernel-module-*", not least because the checksum
+    # if we're just doing an RRECOMMENDS:xxx = "kernel-module-*", not least because the checksum
     # is machine specific.
     # Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
     # and we reccomend a kernel-module, we exclude the dependency.
@@ -84,15 +93,6 @@ def sstate_lockedsigs(d):
             sigs[pn][task] = [h, siggen_lockedsigs_var]
     return sigs
 
-class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
-    name = "OEBasic"
-    def init_rundepcheck(self, data):
-        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
-        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
-        pass
-    def rundep_check(self, fn, recipename, task, dep, depname, dataCaches = None):
-        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCaches)
-
 class SignatureGeneratorOEBasicHashMixIn(object):
     supports_multiconfig_datacaches = True
 
@@ -105,10 +105,11 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         self.lockedhashfn = {}
         self.machine = data.getVar("MACHINE")
         self.mismatch_msgs = []
+        self.mismatch_number = 0
+        self.lockedsigs_msgs = ""
         self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
                                 "").split()
         self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
-        self.buildarch = data.getVar('BUILD_ARCH')
         self._internal = False
         pass
 
@@ -142,18 +143,12 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         super().set_taskdata(data[3:])
 
     def dump_sigs(self, dataCache, options):
-        sigfile = os.getcwd() + "/locked-sigs.inc"
-        bb.plain("Writing locked sigs to %s" % sigfile)
-        self.dump_lockedsigs(sigfile)
+        if 'lockedsigs' in options:
+            sigfile = os.getcwd() + "/locked-sigs.inc"
+            bb.plain("Writing locked sigs to %s" % sigfile)
+            self.dump_lockedsigs(sigfile)
         return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
 
-    def prep_taskhash(self, tid, deps, dataCaches):
-        super().prep_taskhash(tid, deps, dataCaches)
-        if hasattr(self, "extramethod"):
-            (mc, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
-            inherits = " ".join(dataCaches[mc].inherits[fn])
-            if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
-                self.extramethod[tid] = ":" + self.buildarch
 
     def get_taskhash(self, tid, deps, dataCaches):
         if tid in self.lockedhashes:
@@ -196,6 +191,7 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                 #bb.warn("Using %s %s %s" % (recipename, task, h))
 
                 if h != h_locked and h_locked != unihash:
+                    self.mismatch_number += 1
                     self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
                                               % (recipename, task, h, h_locked, var))
 
@@ -210,10 +206,10 @@ class SignatureGeneratorOEBasicHashMixIn(object):
             return self.lockedhashes[tid]
         return super().get_stampfile_hash(tid)
 
-    def get_unihash(self, tid):
+    def get_cached_unihash(self, tid):
         if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
             return self.lockedhashes[tid]
-        return super().get_unihash(tid)
+        return super().get_cached_unihash(tid)
 
     def dump_sigtask(self, fn, task, stampbase, runtime):
         tid = fn + ":" + task
@@ -224,6 +220,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
     def dump_lockedsigs(self, sigfile, taskfilter=None):
         types = {}
         for tid in self.runtaskdeps:
+            # Bitbake changed this to a tuple in newer versions
+            if isinstance(tid, tuple):
+                tid = tid[1]
             if taskfilter:
                 if not tid in taskfilter:
                     continue
@@ -246,15 +245,26 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                         continue
                     f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
                 f.write(' "\n')
-            f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
+            f.write('SIGGEN_LOCKEDSIGS_TYPES:%s = "%s"' % (self.machine, " ".join(l)))
+
+    def dump_siglist(self, sigfile, path_prefix_strip=None):
+        def strip_fn(fn):
+            nonlocal path_prefix_strip
+            if not path_prefix_strip:
+                return fn
+
+            fn_exp = fn.split(":")
+            if fn_exp[-1].startswith(path_prefix_strip):
+                fn_exp[-1] = fn_exp[-1][len(path_prefix_strip):]
+
+            return ":".join(fn_exp)
 
-    def dump_siglist(self, sigfile):
         with open(sigfile, "w") as f:
             tasks = []
             for taskitem in self.taskhash:
                 (fn, task) = taskitem.rsplit(":", 1)
                 pn = self.lockedpnmap[fn]
-                tasks.append((pn, task, fn, self.taskhash[taskitem]))
+                tasks.append((pn, task, strip_fn(fn), self.taskhash[taskitem]))
             for (pn, task, fn, taskhash) in sorted(tasks):
                 f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
 
@@ -262,6 +272,15 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         warn_msgs = []
         error_msgs = []
         sstate_missing_msgs = []
+        info_msgs = None
+
+        if self.lockedsigs:
+            if len(self.lockedsigs) > 10:
+                self.lockedsigs_msgs = "There are %s recipes with locked tasks (%s task(s) have non matching signature)" % (len(self.lockedsigs), self.mismatch_number)
+            else:
+                self.lockedsigs_msgs = "The following recipes have locked tasks:"
+                for pn in self.lockedsigs:
+                    self.lockedsigs_msgs += " %s" % (pn)
 
         for tid in sq_data['hash']:
             if tid not in found:
@@ -274,7 +293,9 @@ class SignatureGeneratorOEBasicHashMixIn(object):
                                                % (pn, taskname, sq_data['hash'][tid]))
 
         checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
-        if checklevel == 'warn':
+        if checklevel == 'info':
+            info_msgs = self.lockedsigs_msgs
+        if checklevel == 'warn' or checklevel == 'info':
             warn_msgs += self.mismatch_msgs
         elif checklevel == 'error':
             error_msgs += self.mismatch_msgs
@@ -285,6 +306,8 @@ class SignatureGeneratorOEBasicHashMixIn(object):
         elif checklevel == 'error':
             error_msgs += sstate_missing_msgs
 
+        if info_msgs:
+            bb.note(info_msgs)
         if warn_msgs:
             bb.warn("\n".join(warn_msgs))
         if error_msgs:
@@ -304,9 +327,21 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge
         self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
         if not self.method:
             bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
+        self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1)
+        self.username = data.getVar("BB_HASHSERVE_USERNAME")
+        self.password = data.getVar("BB_HASHSERVE_PASSWORD")
+        if not self.username or not self.password:
+            try:
+                n = netrc.netrc()
+                auth = n.authenticators(self.server)
+                if auth is not None:
+                    self.username, _, self.password = auth
+            except FileNotFoundError:
+                pass
+            except netrc.NetrcParseError as e:
+                bb.warn("Error parsing %s:%s: %s" % (e.filename, str(e.lineno), e.msg))
 
 # Insert these classes into siggen's namespace so it can see and select them
-bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
 bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
 bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
 
@@ -320,14 +355,14 @@ def find_siginfo(pn, taskname, taskhashlist, d):
     if not taskname:
         # We have to derive pn and taskname
         key = pn
-        splitit = key.split('.bb:')
-        taskname = splitit[1]
-        pn = os.path.basename(splitit[0]).split('_')[0]
-        if key.startswith('virtual:native:'):
-            pn = pn + '-native'
+        if key.startswith("mc:"):
+            # mc:<mc>:<pn>:<task>
+            _, _, pn, taskname = key.split(':', 3)
+        else:
+            # <pn>:<task>
+            pn, taskname = key.split(':', 1)
 
     hashfiles = {}
-    filedates = {}
 
     def get_hashval(siginfo):
         if siginfo.endswith('.siginfo'):
@@ -335,6 +370,9 @@ def find_siginfo(pn, taskname, taskhashlist, d):
         else:
             return siginfo.rpartition('.')[2]
 
+    def get_time(fullpath):
+        return os.stat(fullpath).st_mtime
+
     # First search in stamps dir
     localdata = d.createCopy()
     localdata.setVar('MULTIMACH_TARGET_SYS', '*')
@@ -350,24 +388,21 @@ def find_siginfo(pn, taskname, taskhashlist, d):
     filespec = '%s.%s.sigdata.*' % (stamp, taskname)
     foundall = False
     import glob
+    bb.debug(1, "Calling glob.glob on {}".format(filespec))
     for fullpath in glob.glob(filespec):
         match = False
         if taskhashlist:
             for taskhash in taskhashlist:
                 if fullpath.endswith('.%s' % taskhash):
-                    hashfiles[taskhash] = fullpath
+                    hashfiles[taskhash] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
                     if len(hashfiles) == len(taskhashlist):
                         foundall = True
                         break
         else:
-            try:
-                filedates[fullpath] = os.stat(fullpath).st_mtime
-            except OSError:
-                continue
             hashval = get_hashval(fullpath)
-            hashfiles[hashval] = fullpath
+            hashfiles[hashval] = {'path':fullpath, 'sstate':False, 'time':get_time(fullpath)}
 
-    if not taskhashlist or (len(filedates) < 2 and not foundall):
+    if not taskhashlist or (len(hashfiles) < 2 and not foundall):
         # That didn't work, look in sstate-cache
         hashes = taskhashlist or ['?' * 64]
         localdata = bb.data.createCopy(d)
@@ -376,35 +411,32 @@ def find_siginfo(pn, taskname, taskhashlist, d):
             localdata.setVar('TARGET_VENDOR', '*')
             localdata.setVar('TARGET_OS', '*')
             localdata.setVar('PN', pn)
+            # gcc-source is a special case, same as with local stamps above
+            if pn.startswith("gcc-source"):
+                localdata.setVar('PN', "gcc")
             localdata.setVar('PV', '*')
             localdata.setVar('PR', '*')
             localdata.setVar('BB_TASKHASH', hashval)
+            localdata.setVar('SSTATE_CURRTASK', taskname[3:])
             swspec = localdata.getVar('SSTATE_SWSPEC')
             if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec:
                 localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}')
             elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn:
                 localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
-            sstatename = taskname[3:]
-            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename)
+            filespec = '%s.siginfo' % localdata.getVar('SSTATE_PKG')
 
+            bb.debug(1, "Calling glob.glob on {}".format(filespec))
             matchedfiles = glob.glob(filespec)
             for fullpath in matchedfiles:
                 actual_hashval = get_hashval(fullpath)
                 if actual_hashval in hashfiles:
                     continue
-                hashfiles[hashval] = fullpath
-                if not taskhashlist:
-                    try:
-                        filedates[fullpath] = os.stat(fullpath).st_mtime
-                    except:
-                        continue
+                hashfiles[actual_hashval] = {'path':fullpath, 'sstate':True, 'time':get_time(fullpath)}
 
-    if taskhashlist:
-        return hashfiles
-    else:
-        return filedates
+    return hashfiles
 
 bb.siggen.find_siginfo = find_siginfo
+bb.siggen.find_siginfo_version = 2
 
 
 def sstate_get_manifest_filename(task, d):
@@ -440,7 +472,7 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
     elif "-cross-canadian" in taskdata:
         pkgarchs = ["${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}"]
     elif "-cross-" in taskdata:
-        pkgarchs = ["${BUILD_ARCH}_${TARGET_ARCH}"]
+        pkgarchs = ["${BUILD_ARCH}"]
     elif "-crosssdk" in taskdata:
         pkgarchs = ["${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"]
     else:
@@ -449,11 +481,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
         pkgarchs.append('allarch')
         pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
 
+    searched_manifests = []
+
     for pkgarch in pkgarchs:
         manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
         if os.path.exists(manifest):
             return manifest, d2
-    bb.error("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
+        searched_manifests.append(manifest)
+    bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
+            % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
     return None, d2
 
 def OEOuthashBasic(path, sigfile, task, d):
@@ -467,6 +503,8 @@ def OEOuthashBasic(path, sigfile, task, d):
     import stat
     import pwd
     import grp
+    import re
+    import fnmatch
 
     def update_hash(s):
         s = s.encode('utf-8')
@@ -476,20 +514,37 @@ def OEOuthashBasic(path, sigfile, task, d):
 
     h = hashlib.sha256()
     prev_dir = os.getcwd()
+    corebase = d.getVar("COREBASE")
+    tmpdir = d.getVar("TMPDIR")
     include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
     if "package_write_" in task or task == "package_qa":
         include_owners = False
     include_timestamps = False
+    include_root = True
     if task == "package":
-        include_timestamps = d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1'
-    extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
+        include_timestamps = True
+        include_root = False
+    hash_version = d.getVar('HASHEQUIV_HASH_VERSION')
+    extra_sigdata = d.getVar("HASHEQUIV_EXTRA_SIGDATA")
+
+    filemaps = {}
+    for m in (d.getVar('SSTATE_HASHEQUIV_FILEMAP') or '').split():
+        entry = m.split(":")
+        if len(entry) != 3 or entry[0] != task:
+            continue
+        filemaps.setdefault(entry[1], [])
+        filemaps[entry[1]].append(entry[2])
 
     try:
         os.chdir(path)
+        basepath = os.path.normpath(path)
 
         update_hash("OEOuthashBasic\n")
-        if extra_content:
-            update_hash(extra_content + "\n")
+        if hash_version:
+            update_hash(hash_version + "\n")
+
+        if extra_sigdata:
+            update_hash(extra_sigdata + "\n")
 
         # It is only currently useful to get equivalent hashes for things that
         # can be restored from sstate. Since the sstate object is named using
@@ -534,28 +589,29 @@ def OEOuthashBasic(path, sigfile, task, d):
                 else:
                     add_perm(stat.S_IXUSR, 'x')
 
-                add_perm(stat.S_IRGRP, 'r')
-                add_perm(stat.S_IWGRP, 'w')
-                if stat.S_ISGID & s.st_mode:
-                    add_perm(stat.S_IXGRP, 's', 'S')
-                else:
-                    add_perm(stat.S_IXGRP, 'x')
+                if include_owners:
+                    # Group/other permissions are only relevant in pseudo context
+                    add_perm(stat.S_IRGRP, 'r')
+                    add_perm(stat.S_IWGRP, 'w')
+                    if stat.S_ISGID & s.st_mode:
+                        add_perm(stat.S_IXGRP, 's', 'S')
+                    else:
+                        add_perm(stat.S_IXGRP, 'x')
 
-                add_perm(stat.S_IROTH, 'r')
-                add_perm(stat.S_IWOTH, 'w')
-                if stat.S_ISVTX & s.st_mode:
-                    update_hash('t')
-                else:
-                    add_perm(stat.S_IXOTH, 'x')
+                    add_perm(stat.S_IROTH, 'r')
+                    add_perm(stat.S_IWOTH, 'w')
+                    if stat.S_ISVTX & s.st_mode:
+                        update_hash('t')
+                    else:
+                        add_perm(stat.S_IXOTH, 'x')
 
-                if include_owners:
                     try:
                         update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
                         update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
                     except KeyError as e:
-                        bb.warn("KeyError in %s" % path)
                         msg = ("KeyError: %s\nPath %s is owned by uid %d, gid %d, which doesn't match "
-                            "any user/group on target. This may be due to host contamination." % (e, path, s.st_uid, s.st_gid))
+                               "any user/group on target. This may be due to host contamination." %
+                               (e, os.path.abspath(path), s.st_uid, s.st_gid))
                         raise Exception(msg).with_traceback(e.__traceback__)
 
                 if include_timestamps:
@@ -567,8 +623,13 @@ def OEOuthashBasic(path, sigfile, task, d):
                 else:
                     update_hash(" " * 9)
 
+                filterfile = False
+                for entry in filemaps:
+                    if fnmatch.fnmatch(path, entry):
+                        filterfile = True
+
                 update_hash(" ")
-                if stat.S_ISREG(s.st_mode):
+                if stat.S_ISREG(s.st_mode) and not filterfile:
                     update_hash("%10d" % s.st_size)
                 else:
                     update_hash(" " * 10)
@@ -577,9 +638,24 @@ def OEOuthashBasic(path, sigfile, task, d):
                 fh = hashlib.sha256()
                 if stat.S_ISREG(s.st_mode):
                     # Hash file contents
-                    with open(path, 'rb') as d:
-                        for chunk in iter(lambda: d.read(4096), b""):
+                    if filterfile:
+                        # Need to ignore paths in crossscripts and postinst-useradd files.
+                        with open(path, 'rb') as d:
+                            chunk = d.read()
+                            chunk = chunk.replace(bytes(basepath, encoding='utf8'), b'')
+                            for entry in filemaps:
+                                if not fnmatch.fnmatch(path, entry):
+                                    continue
+                                for r in filemaps[entry]:
+                                    if r.startswith("regex-"):
+                                        chunk = re.sub(bytes(r[6:], encoding='utf8'), b'', chunk)
+                                    else:
+                                        chunk = chunk.replace(bytes(r, encoding='utf8'), b'')
                             fh.update(chunk)
+                    else:
+                        with open(path, 'rb') as d:
+                            for chunk in iter(lambda: d.read(4096), b""):
+                                fh.update(chunk)
                     update_hash(fh.hexdigest())
                 else:
                     update_hash(" " * len(fh.hexdigest()))
@@ -592,11 +668,16 @@ def OEOuthashBasic(path, sigfile, task, d):
             update_hash("\n")
 
         # Process this directory and all its child files
-        process(root)
+        if include_root or root != ".":
+            process(root)
         for f in files:
             if f == 'fixmepath':
                 continue
             process(os.path.join(root, f))
+
+        for dir in dirs:
+            if os.path.islink(os.path.join(root, dir)):
+                process(os.path.join(root, dir))
     finally:
         os.chdir(prev_dir)
 
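Note on the netrc change in the SignatureGeneratorOEEquivHash hunk above: when BB_HASHSERVE_USERNAME and BB_HASHSERVE_PASSWORD are not both set, the patch falls back to looking up credentials for the hash equivalence server in ~/.netrc. A minimal standalone sketch of that lookup is given below; the resolve_hashserve_auth() helper and the example host name are illustrative only and are not part of the patch.

# Sketch of the ~/.netrc credential fallback (illustrative, outside BitBake).
import netrc

def resolve_hashserve_auth(host, username=None, password=None):
    # Explicit credentials win; otherwise consult ~/.netrc for this host.
    if not username or not password:
        try:
            auth = netrc.netrc().authenticators(host)
            if auth is not None:
                username, _, password = auth
        except FileNotFoundError:
            pass  # no ~/.netrc present, keep whatever was passed in
        except netrc.NetrcParseError as e:
            print("Error parsing %s:%s: %s" % (e.filename, e.lineno, e.msg))
    return username, password

# Example with a hypothetical host:
#   resolve_hashserve_auth("hashserv.example.com")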