Diffstat (limited to 'bitbake/lib/bb/siggen.py')
 bitbake/lib/bb/siggen.py | 77 ++++++++++++----------------------
 1 file changed, 27 insertions(+), 50 deletions(-)
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 8ab08ec961..41eb643012 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -201,9 +201,6 @@ class SignatureGenerator(object):
     def save_unitaskhashes(self):
         return
 
-    def copy_unitaskhashes(self, targetdir):
-        return
-
     def set_setscene_tasks(self, setscene_tasks):
         return
 
@@ -381,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
             self.taints[tid] = taint
             logger.warning("%s is tainted from a forced run" % tid)
 
-        return
+        return set(dep for _, dep in self.runtaskdeps[tid])
 
     def get_taskhash(self, tid, deps, dataCaches):
 
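The new return value hands callers the set of dependency task ids directly. Assuming each entry of self.runtaskdeps[tid] is a (dependency name, dependency tid) pair, the expression is a plain projection of the second element; a minimal sketch with made-up tids:

    # Illustrative only: runtaskdeps maps a tid to (name, dep-tid) pairs.
    runtaskdeps = {
        "mc:recipe:do_compile": [
            ("virtual/gcc", "mc:gcc:do_populate_sysroot"),
            ("zlib", "mc:zlib:do_populate_sysroot"),
        ]
    }
    deps = set(dep for _, dep in runtaskdeps["mc:recipe:do_compile"])
    # deps == {"mc:gcc:do_populate_sysroot", "mc:zlib:do_populate_sysroot"}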
@@ -418,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
     def save_unitaskhashes(self):
         self.unihash_cache.save(self.unitaskhashes)
 
-    def copy_unitaskhashes(self, targetdir):
-        self.unihash_cache.copyfile(targetdir)
-
     def dump_sigtask(self, mcfn, task, stampbase, runtime):
         tid = mcfn + ":" + task
         mc = bb.runqueue.mc_from_tid(mcfn)
@@ -540,7 +534,7 @@ class SignatureGeneratorUniHashMixIn(object):
     def __init__(self, data):
         self.extramethod = {}
         # NOTE: The cache only tracks hashes that exist. Hashes that don't
-        # exist are always queries from the server since it is possible for
+        # exist are always queried from the server since it is possible for
         # hashes to appear over time, but much less likely for them to
         # disappear
         self.unihash_exists_cache = set()
@@ -558,11 +552,11 @@ class SignatureGeneratorUniHashMixIn(object):
         super().__init__(data)
 
     def get_taskdata(self):
-        return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
+        return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
 
     def set_taskdata(self, data):
-        self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
-        super().set_taskdata(data[7:])
+        self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
+        super().set_taskdata(data[6:])
 
     def get_hashserv_creds(self):
         if self.username and self.password:
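get_taskdata() and set_taskdata() must stay mirror images: each class prepends its own fields and hands the remainder to its superclass, so dropping max_parallel means shrinking both the slice bound and the remainder offset together. A minimal sketch of the pattern, using hypothetical Base/Mixin classes:

    class Base:
        def get_taskdata(self):
            return (self.basehash,)

        def set_taskdata(self, data):
            self.basehash = data[0]

    class Mixin(Base):
        def get_taskdata(self):
            # Prepend this class's fields; the superclass appends its own.
            return (self.server, self.method) + super().get_taskdata()

        def set_taskdata(self, data):
            # Consume exactly the fields this class contributed ...
            self.server, self.method = data[:2]
            # ... and pass the rest up the chain.
            super().set_taskdata(data[2:])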
@@ -595,13 +589,6 @@ class SignatureGeneratorUniHashMixIn(object):
             self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
         yield self._client
 
-    @contextmanager
-    def client_pool(self):
-        with self._client_env():
-            if getattr(self, '_client_pool', None) is None:
-                self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
-            yield self._client_pool
-
     def reset(self, data):
         self.__close_clients()
         return super().reset(data)
@@ -678,25 +665,20 @@ class SignatureGeneratorUniHashMixIn(object):
         if len(query) == 0:
             return {}
 
-        uncached_query = {}
+        query_keys = []
         result = {}
         for key, unihash in query.items():
             if unihash in self.unihash_exists_cache:
                 result[key] = True
             else:
-                uncached_query[key] = unihash
+                query_keys.append(key)
 
-        if self.max_parallel <= 1 or len(uncached_query) <= 1:
-            # No parallelism required. Make the query serially with the single client
+        if query_keys:
             with self.client() as client:
-                uncached_result = {
-                    key: client.unihash_exists(value) for key, value in uncached_query.items()
-                }
-        else:
-            with self.client_pool() as client_pool:
-                uncached_result = client_pool.unihashes_exist(uncached_query)
+                query_result = client.unihash_exists_batch(query[k] for k in query_keys)
 
-        for key, exists in uncached_result.items():
+        for idx, key in enumerate(query_keys):
+            exists = query_result[idx]
             if exists:
                 self.unihash_exists_cache.add(query[key])
             result[key] = exists
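The rewritten method keeps the positive-existence cache but collapses every uncached lookup into a single round trip; the replies are positional, so the key list preserves request order. The pattern in isolation, as a sketch with a hypothetical client whose exists_batch() returns booleans in request order:

    def unihashes_exist(query, cache, client):
        # query: key -> unihash; cache: set of hashes known to exist.
        result = {}
        missing = []
        for key, unihash in query.items():
            if unihash in cache:
                result[key] = True          # served from the cache
            else:
                missing.append(key)

        if missing:
            # One batched round trip; replies arrive in request order.
            replies = client.exists_batch(query[k] for k in missing)
            for key, exists in zip(missing, replies):
                if exists:
                    cache.add(query[key])   # cache existence only, never absence
                result[key] = exists

        return result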
@@ -712,29 +694,24 @@ class SignatureGeneratorUniHashMixIn(object):
         unihash
         """
         result = {}
-        queries = {}
-        query_result = {}
+        query_tids = []
 
         for tid in tids:
             unihash = self.get_cached_unihash(tid)
             if unihash:
                 result[tid] = unihash
             else:
-                queries[tid] = (self._get_method(tid), self.taskhash[tid])
-
-        if len(queries) == 0:
-            return result
+                query_tids.append(tid)
 
-        if self.max_parallel <= 1 or len(queries) <= 1:
-            # No parallelism required. Make the query serially with the single client
-            with self.client() as client:
-                for tid, args in queries.items():
-                    query_result[tid] = client.get_unihash(*args)
-        else:
-            with self.client_pool() as client_pool:
-                query_result = client_pool.get_unihashes(queries)
+        if query_tids:
+            unihashes = []
+            try:
+                with self.client() as client:
+                    unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
+            except (ConnectionError, FileNotFoundError, EOFError) as e:
+                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
-        for tid, unihash in query_result.items():
+        for idx, tid in enumerate(query_tids):
             # In the absence of being able to discover a unique hash from the
             # server, make it be equivalent to the taskhash. The unique "hash" only
             # really needs to be a unique string (not even necessarily a hash), but
@@ -749,7 +726,9 @@ class SignatureGeneratorUniHashMixIn(object):
             # to the server, there is a better chance that they will agree on
             # the unique hash.
             taskhash = self.taskhash[tid]
-            if unihash:
+
+            if unihashes and unihashes[idx]:
+                unihash = unihashes[idx]
                 # A unique hash equal to the taskhash is not very interesting,
                 # so it is reported it at debug level 2. If they differ, that
                 # is much more interesting, so it is reported at debug level 1
@@ -758,7 +737,6 @@ class SignatureGeneratorUniHashMixIn(object):
                 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
                 unihash = taskhash
 
-
             self.set_unihash(tid, unihash)
             self.unihash[tid] = unihash
             result[tid] = unihash
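Because the batch call can fail wholesale (server unreachable), the loop treats an empty unihashes list the same as a per-entry miss and falls back to the taskhash. Stripped to its essentials (a sketch; client, method and warn are stand-ins for the hashserv client, the method lookup and bb.warn):

    def resolve_unihashes(tids, taskhash, method, client, warn):
        unihashes = []
        try:
            unihashes = client.get_batch((method(tid), taskhash[tid]) for tid in tids)
        except ConnectionError as e:
            warn("hash server unreachable: %s" % e)

        result = {}
        for idx, tid in enumerate(tids):
            # No reply at all (server down) or an empty per-entry reply:
            # the taskhash itself becomes the unihash.
            if unihashes and unihashes[idx]:
                result[tid] = unihashes[idx]
            else:
                result[tid] = taskhash[tid]
        return result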
@@ -839,7 +817,7 @@ class SignatureGeneratorUniHashMixIn(object):
                     d.setVar('BB_UNIHASH', new_unihash)
                 else:
                     hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
-        except ConnectionError as e:
+        except (ConnectionError, FileNotFoundError, EOFError) as e:
             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
         finally:
             if sigfile:
@@ -881,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
                 # TODO: What to do here?
                 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
 
-        except ConnectionError as e:
+        except (ConnectionError, FileNotFoundError, EOFError) as e:
             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
         return False
@@ -895,13 +873,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
         super().init_rundepcheck(data)
         self.server = data.getVar('BB_HASHSERVE')
         self.method = "sstate_output_hash"
-        self.max_parallel = 1
 
 def clean_checksum_file_path(file_checksum_tuple):
     f, cs = file_checksum_tuple
     if "/./" in f:
         return "./" + f.split("/./")[1]
-    return f
+    return os.path.basename(f)
 
 def dump_this_task(outfile, d):
     import bb.parse
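In the fallback case the helper now keeps only the file name instead of the full path, so signature data no longer varies with the build-host directory layout; paths carrying the "/./" marker keep their relative tail as before. Illustrative values (hypothetical paths):

    clean_checksum_file_path(("/work/x/./src/main.c", "abc"))  # -> "./src/main.c" (unchanged)
    clean_checksum_file_path(("/work/x/files/main.c", "abc"))  # -> "main.c" (previously the full path)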
