Diffstat (limited to 'bitbake/lib/bb/siggen.py')
-rw-r--r--  bitbake/lib/bb/siggen.py | 133
1 file changed, 78 insertions(+), 55 deletions(-)
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 2a0ecf57e1..a6163b55ea 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -15,6 +15,7 @@ import difflib
 import simplediff
 import json
 import types
+from contextlib import contextmanager
 import bb.compress.zstd
 from bb.checksum import FileChecksumCache
 from bb import runqueue
@@ -28,6 +29,14 @@ hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
 # The minimum version of the find_siginfo function we need
 find_siginfo_minversion = 2
 
+HASHSERV_ENVVARS = [
+    "SSL_CERT_DIR",
+    "SSL_CERT_FILE",
+    "NO_PROXY",
+    "HTTPS_PROXY",
+    "HTTP_PROXY"
+]
+
 def check_siggen_version(siggen):
     if not hasattr(siggen, "find_siginfo_version"):
         bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
@@ -192,9 +201,6 @@ class SignatureGenerator(object):
     def save_unitaskhashes(self):
         return
 
-    def copy_unitaskhashes(self, targetdir):
-        return
-
     def set_setscene_tasks(self, setscene_tasks):
         return
 
@@ -372,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
             self.taints[tid] = taint
             logger.warning("%s is tainted from a forced run" % tid)
 
-        return
+        return set(dep for _, dep in self.runtaskdeps[tid])
 
     def get_taskhash(self, tid, deps, dataCaches):
 
@@ -409,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
     def save_unitaskhashes(self):
         self.unihash_cache.save(self.unitaskhashes)
 
-    def copy_unitaskhashes(self, targetdir):
-        self.unihash_cache.copyfile(targetdir)
-
     def dump_sigtask(self, mcfn, task, stampbase, runtime):
         tid = mcfn + ":" + task
         mc = bb.runqueue.mc_from_tid(mcfn)
@@ -531,19 +534,28 @@ class SignatureGeneratorUniHashMixIn(object):
     def __init__(self, data):
         self.extramethod = {}
         # NOTE: The cache only tracks hashes that exist. Hashes that don't
-        # exist are always queries from the server since it is possible for
+        # exist are always queried from the server since it is possible for
         # hashes to appear over time, but much less likely for them to
         # disappear
         self.unihash_exists_cache = set()
         self.username = None
         self.password = None
+        self.env = {}
+
+        origenv = data.getVar("BB_ORIGENV")
+        for e in HASHSERV_ENVVARS:
+            value = data.getVar(e)
+            if not value and origenv:
+                value = origenv.getVar(e)
+            if value:
+                self.env[e] = value
         super().__init__(data)
 
     def get_taskdata(self):
-        return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password) + super().get_taskdata()
+        return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
 
     def set_taskdata(self, data):
-        self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password = data[:6]
+        self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
         super().set_taskdata(data[6:])
 
     def get_hashserv_creds(self):
@@ -555,15 +567,27 @@ class SignatureGeneratorUniHashMixIn(object):
 
         return {}
 
-    def client(self):
-        if getattr(self, '_client', None) is None:
-            self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
-        return self._client
+    @contextmanager
+    def _client_env(self):
+        orig_env = os.environ.copy()
+        try:
+            for k, v in self.env.items():
+                os.environ[k] = v
 
-    def client_pool(self):
-        if getattr(self, '_client_pool', None) is None:
-            self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
-        return self._client_pool
+            yield
+        finally:
+            for k, v in self.env.items():
+                if k in orig_env:
+                    os.environ[k] = orig_env[k]
+                else:
+                    del os.environ[k]
+
+    @contextmanager
+    def client(self):
+        with self._client_env():
+            if getattr(self, '_client', None) is None:
+                self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
+            yield self._client
 
     def reset(self, data):
         self.__close_clients()
@@ -574,12 +598,13 @@ class SignatureGeneratorUniHashMixIn(object):
         return super().exit()
 
     def __close_clients(self):
-        if getattr(self, '_client', None) is not None:
-            self._client.close()
-            self._client = None
-        if getattr(self, '_client_pool', None) is not None:
-            self._client_pool.close()
-            self._client_pool = None
+        with self._client_env():
+            if getattr(self, '_client', None) is not None:
+                self._client.close()
+                self._client = None
+            if getattr(self, '_client_pool', None) is not None:
+                self._client_pool.close()
+                self._client_pool = None
 
     def get_stampfile_hash(self, tid):
         if tid in self.taskhash:
@@ -640,23 +665,20 @@ class SignatureGeneratorUniHashMixIn(object):
         if len(query) == 0:
             return {}
 
-        uncached_query = {}
+        query_keys = []
         result = {}
         for key, unihash in query.items():
             if unihash in self.unihash_exists_cache:
                 result[key] = True
             else:
-                uncached_query[key] = unihash
+                query_keys.append(key)
 
-        if self.max_parallel <= 1 or len(uncached_query) <= 1:
-            # No parallelism required. Make the query serially with the single client
-            uncached_result = {
-                key: self.client().unihash_exists(value) for key, value in uncached_query.items()
-            }
-        else:
-            uncached_result = self.client_pool().unihashes_exist(uncached_query)
+        if query_keys:
+            with self.client() as client:
+                query_result = client.unihash_exists_batch(query[k] for k in query_keys)
 
-        for key, exists in uncached_result.items():
+        for idx, key in enumerate(query_keys):
+            exists = query_result[idx]
             if exists:
                 self.unihash_exists_cache.add(query[key])
             result[key] = exists
@@ -672,27 +694,24 @@ class SignatureGeneratorUniHashMixIn(object):
         unihash
         """
         result = {}
-        queries = {}
-        query_result = {}
+        query_tids = []
 
         for tid in tids:
             unihash = self.get_cached_unihash(tid)
             if unihash:
                 result[tid] = unihash
             else:
-                queries[tid] = (self._get_method(tid), self.taskhash[tid])
-
-        if len(queries) == 0:
-            return result
+                query_tids.append(tid)
 
-        if self.max_parallel <= 1 or len(queries) <= 1:
-            # No parallelism required. Make the query serially with the single client
-            for tid, args in queries.items():
-                query_result[tid] = self.client().get_unihash(*args)
-        else:
-            query_result = self.client_pool().get_unihashes(queries)
+        if query_tids:
+            unihashes = []
+            try:
+                with self.client() as client:
+                    unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
+            except (ConnectionError, FileNotFoundError) as e:
+                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
-        for tid, unihash in query_result.items():
+        for idx, tid in enumerate(query_tids):
             # In the absence of being able to discover a unique hash from the
             # server, make it be equivalent to the taskhash. The unique "hash" only
             # really needs to be a unique string (not even necessarily a hash), but
@@ -707,7 +726,9 @@ class SignatureGeneratorUniHashMixIn(object):
             # to the server, there is a better chance that they will agree on
             # the unique hash.
             taskhash = self.taskhash[tid]
-            if unihash:
+
+            if unihashes and unihashes[idx]:
+                unihash = unihashes[idx]
                 # A unique hash equal to the taskhash is not very interesting,
                 # so it is reported it at debug level 2. If they differ, that
                 # is much more interesting, so it is reported at debug level 1
@@ -716,7 +737,6 @@ class SignatureGeneratorUniHashMixIn(object):
                 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
                 unihash = taskhash
 
-
             self.set_unihash(tid, unihash)
             self.unihash[tid] = unihash
             result[tid] = unihash
@@ -785,7 +805,9 @@ class SignatureGeneratorUniHashMixIn(object):
             if tid in self.extramethod:
                 method = method + self.extramethod[tid]
 
-            data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
+            with self.client() as client:
+                data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
+
             new_unihash = data['unihash']
 
             if new_unihash != unihash:
@@ -795,7 +817,7 @@ class SignatureGeneratorUniHashMixIn(object):
                 d.setVar('BB_UNIHASH', new_unihash)
             else:
                 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
-        except ConnectionError as e:
+        except (ConnectionError, FileNotFoundError) as e:
             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
         finally:
             if sigfile:
@@ -816,7 +838,9 @@ class SignatureGeneratorUniHashMixIn(object):
             if tid in self.extramethod:
                 method = method + self.extramethod[tid]
 
-            data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+            with self.client() as client:
+                data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+
             hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
 
             if data is None:
@@ -835,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
                 # TODO: What to do here?
                 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
 
-        except ConnectionError as e:
+        except (ConnectionError, FileNotFoundError) as e:
             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
         return False
@@ -849,13 +873,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
         super().init_rundepcheck(data)
         self.server = data.getVar('BB_HASHSERVE')
         self.method = "sstate_output_hash"
-        self.max_parallel = 1
 
 def clean_checksum_file_path(file_checksum_tuple):
     f, cs = file_checksum_tuple
     if "/./" in f:
         return "./" + f.split("/./")[1]
-    return f
+    return os.path.basename(f)
 
 def dump_this_task(outfile, d):
     import bb.parse
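
The core mechanism this patch adds is the _client_env() context manager: the SSL and proxy variables captured from BB_ORIGENV are applied to os.environ only while a hash equivalence server call is in flight, then restored afterwards. A minimal standalone sketch of that save/restore pattern (scoped_env is a hypothetical stand-in for the patched _client_env, which reads self.env rather than taking an argument; the proxy URL is an example value):

import os
from contextlib import contextmanager

@contextmanager
def scoped_env(env):
    # Apply `env` on top of os.environ for the duration of the block only.
    orig_env = os.environ.copy()
    try:
        for k, v in env.items():
            os.environ[k] = v
        yield
    finally:
        # Restore prior values; drop keys that did not exist before.
        for k in env:
            if k in orig_env:
                os.environ[k] = orig_env[k]
            else:
                del os.environ[k]

# The proxy setting is visible only inside the with-block.
with scoped_env({"HTTPS_PROXY": "http://proxy.example.com:8080"}):
    print(os.environ["HTTPS_PROXY"])

Because client() is itself now a context manager wrapping _client_env(), every call site changes from the old accessor form, data = self.client().report_unihash(...), to the with self.client() as client: form, which guarantees the adjusted environment is in place for each network round trip.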