diff options
author | Joshua Watt <JPEWhacker@gmail.com> | 2024-02-18 15:59:52 -0700 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2024-02-19 11:58:12 +0000 |
commit | 61e184b3ed4d9691af36917036ceeab2bc8482f9 (patch) | |
tree | 79c8256d4cd299fa2838b8672a1bee907536878f | |
parent | e5056394e030c4573ca7d65e79484a2aa8afaaed (diff) | |
download | poky-61e184b3ed4d9691af36917036ceeab2bc8482f9.tar.gz |
bitbake: siggen: Add parallel unihash exist API
Adds API to query if unihashes are known to the server in parallel
(Bitbake rev: 7e2479109b40ce82507f73b4f935903f7f79fb06)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
-rw-r--r-- | bitbake/lib/bb/siggen.py | 32 |
1 file changed, 32 insertions, 0 deletions
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index e1a4fa2cdd..3ab8431a08 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -530,6 +530,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic): | |||
530 | class SignatureGeneratorUniHashMixIn(object): | 530 | class SignatureGeneratorUniHashMixIn(object): |
def __init__(self, data):
    # NOTE: the existence cache is positive-only: hashes known to exist
    # are remembered, but unknown hashes are re-queried from the server
    # every time, since a hash can appear on the server over time but is
    # much less likely to disappear.
    self.unihash_exists_cache = set()
    # Per-task extra method overrides; presumably filled in by the hash
    # server handling elsewhere in this mixin -- confirm against callers.
    self.extramethod = {}
    super().__init__(data)
534 | 539 | ||
535 | def get_taskdata(self): | 540 | def get_taskdata(self): |
@@ -620,6 +625,33 @@ class SignatureGeneratorUniHashMixIn(object): | |||
620 | 625 | ||
621 | return method | 626 | return method |
622 | 627 | ||
def unihashes_exist(self, query):
    """Query which unified hashes are known to the hash server.

    query is a dictionary mapping an opaque key to a unihash string.
    Returns a dictionary with the same keys, where each value is True if
    the corresponding unihash exists on the server and False otherwise.

    Positive results are remembered in self.unihash_exists_cache so
    repeated lookups of the same unihash skip the server round trip.
    Negative results are never cached, because a hash that is missing
    now may appear on the server later.
    """
    if not query:
        return {}

    result = {}
    uncached_query = {}
    for key, unihash in query.items():
        if unihash in self.unihash_exists_cache:
            result[key] = True
        else:
            uncached_query[key] = unihash

    if self.max_parallel <= 1 or len(uncached_query) <= 1:
        # No parallelism required. Make the queries serially with the
        # single client; the client lookup is loop-invariant, so hoist
        # it out of the comprehension.
        uncached_result = {}
        if uncached_query:
            client = self.client()
            uncached_result = {
                key: client.unihash_exists(value)
                for key, value in uncached_query.items()
            }
    else:
        uncached_result = self.client_pool().unihashes_exist(uncached_query)

    for key, exists in uncached_result.items():
        if exists:
            # Positive-only cache; see docstring.
            self.unihash_exists_cache.add(query[key])
        result[key] = exists

    return result
654 | |||
def get_unihash(self, tid):
    """Convenience wrapper: resolve the unified hash for a single task id
    via the batched get_unihashes() lookup."""
    unihashes = self.get_unihashes([tid])
    return unihashes[tid]
625 | 657 | ||