diff options
| author | Joshua Watt <JPEWhacker@gmail.com> | 2024-04-12 09:57:09 -0600 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2024-04-16 07:33:19 +0100 |
| commit | 2ecd97fa59fe6b023464b546ad153e6e7c137cf2 (patch) | |
| tree | 74c090e24e19b67c7f2a3283b61bf4734b4cd6ef | |
| parent | 6bd8367aa98e3108e9f1de9bdf809ded5dfc7413 (diff) | |
| download | poky-2ecd97fa59fe6b023464b546ad153e6e7c137cf2.tar.gz | |
bitbake: siggen: Capture SSL environment for hashserver
Now that the bitbake hash server supports SSL connections, we need to
capture a few environment variables which can affect the ability to
connect via SSL. Note that the variables are only put in place to affect
the environment while actually invoking the server.
[RP: Tweak to use BB_ORIGENV as well]
[RP: Tweak to handle os.environ restore correctly]
(Bitbake rev: 0bacf6551821beb8915513b120ae672ae8eb1612)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
| -rw-r--r-- | bitbake/lib/bb/siggen.py | 94 |
1 file changed, 70 insertions, 24 deletions
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 2a0ecf57e1..8ab08ec961 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
| @@ -15,6 +15,7 @@ import difflib | |||
| 15 | import simplediff | 15 | import simplediff |
| 16 | import json | 16 | import json |
| 17 | import types | 17 | import types |
| 18 | from contextlib import contextmanager | ||
| 18 | import bb.compress.zstd | 19 | import bb.compress.zstd |
| 19 | from bb.checksum import FileChecksumCache | 20 | from bb.checksum import FileChecksumCache |
| 20 | from bb import runqueue | 21 | from bb import runqueue |
| @@ -28,6 +29,14 @@ hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') | |||
| 28 | # The minimum version of the find_siginfo function we need | 29 | # The minimum version of the find_siginfo function we need |
| 29 | find_siginfo_minversion = 2 | 30 | find_siginfo_minversion = 2 |
| 30 | 31 | ||
| 32 | HASHSERV_ENVVARS = [ | ||
| 33 | "SSL_CERT_DIR", | ||
| 34 | "SSL_CERT_FILE", | ||
| 35 | "NO_PROXY", | ||
| 36 | "HTTPS_PROXY", | ||
| 37 | "HTTP_PROXY" | ||
| 38 | ] | ||
| 39 | |||
| 31 | def check_siggen_version(siggen): | 40 | def check_siggen_version(siggen): |
| 32 | if not hasattr(siggen, "find_siginfo_version"): | 41 | if not hasattr(siggen, "find_siginfo_version"): |
| 33 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)") | 42 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)") |
| @@ -537,14 +546,23 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 537 | self.unihash_exists_cache = set() | 546 | self.unihash_exists_cache = set() |
| 538 | self.username = None | 547 | self.username = None |
| 539 | self.password = None | 548 | self.password = None |
| 549 | self.env = {} | ||
| 550 | |||
| 551 | origenv = data.getVar("BB_ORIGENV") | ||
| 552 | for e in HASHSERV_ENVVARS: | ||
| 553 | value = data.getVar(e) | ||
| 554 | if not value and origenv: | ||
| 555 | value = origenv.getVar(e) | ||
| 556 | if value: | ||
| 557 | self.env[e] = value | ||
| 540 | super().__init__(data) | 558 | super().__init__(data) |
| 541 | 559 | ||
| 542 | def get_taskdata(self): | 560 | def get_taskdata(self): |
| 543 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password) + super().get_taskdata() | 561 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() |
| 544 | 562 | ||
| 545 | def set_taskdata(self, data): | 563 | def set_taskdata(self, data): |
| 546 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password = data[:6] | 564 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] |
| 547 | super().set_taskdata(data[6:]) | 565 | super().set_taskdata(data[7:]) |
| 548 | 566 | ||
| 549 | def get_hashserv_creds(self): | 567 | def get_hashserv_creds(self): |
| 550 | if self.username and self.password: | 568 | if self.username and self.password: |
| @@ -555,15 +573,34 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 555 | 573 | ||
| 556 | return {} | 574 | return {} |
| 557 | 575 | ||
| 576 | @contextmanager | ||
| 577 | def _client_env(self): | ||
| 578 | orig_env = os.environ.copy() | ||
| 579 | try: | ||
| 580 | for k, v in self.env.items(): | ||
| 581 | os.environ[k] = v | ||
| 582 | |||
| 583 | yield | ||
| 584 | finally: | ||
| 585 | for k, v in self.env.items(): | ||
| 586 | if k in orig_env: | ||
| 587 | os.environ[k] = orig_env[k] | ||
| 588 | else: | ||
| 589 | del os.environ[k] | ||
| 590 | |||
| 591 | @contextmanager | ||
| 558 | def client(self): | 592 | def client(self): |
| 559 | if getattr(self, '_client', None) is None: | 593 | with self._client_env(): |
| 560 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) | 594 | if getattr(self, '_client', None) is None: |
| 561 | return self._client | 595 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) |
| 596 | yield self._client | ||
| 562 | 597 | ||
| 598 | @contextmanager | ||
| 563 | def client_pool(self): | 599 | def client_pool(self): |
| 564 | if getattr(self, '_client_pool', None) is None: | 600 | with self._client_env(): |
| 565 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) | 601 | if getattr(self, '_client_pool', None) is None: |
| 566 | return self._client_pool | 602 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) |
| 603 | yield self._client_pool | ||
| 567 | 604 | ||
| 568 | def reset(self, data): | 605 | def reset(self, data): |
| 569 | self.__close_clients() | 606 | self.__close_clients() |
| @@ -574,12 +611,13 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 574 | return super().exit() | 611 | return super().exit() |
| 575 | 612 | ||
| 576 | def __close_clients(self): | 613 | def __close_clients(self): |
| 577 | if getattr(self, '_client', None) is not None: | 614 | with self._client_env(): |
| 578 | self._client.close() | 615 | if getattr(self, '_client', None) is not None: |
| 579 | self._client = None | 616 | self._client.close() |
| 580 | if getattr(self, '_client_pool', None) is not None: | 617 | self._client = None |
| 581 | self._client_pool.close() | 618 | if getattr(self, '_client_pool', None) is not None: |
| 582 | self._client_pool = None | 619 | self._client_pool.close() |
| 620 | self._client_pool = None | ||
| 583 | 621 | ||
| 584 | def get_stampfile_hash(self, tid): | 622 | def get_stampfile_hash(self, tid): |
| 585 | if tid in self.taskhash: | 623 | if tid in self.taskhash: |
| @@ -650,11 +688,13 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 650 | 688 | ||
| 651 | if self.max_parallel <= 1 or len(uncached_query) <= 1: | 689 | if self.max_parallel <= 1 or len(uncached_query) <= 1: |
| 652 | # No parallelism required. Make the query serially with the single client | 690 | # No parallelism required. Make the query serially with the single client |
| 653 | uncached_result = { | 691 | with self.client() as client: |
| 654 | key: self.client().unihash_exists(value) for key, value in uncached_query.items() | 692 | uncached_result = { |
| 655 | } | 693 | key: client.unihash_exists(value) for key, value in uncached_query.items() |
| 694 | } | ||
| 656 | else: | 695 | else: |
| 657 | uncached_result = self.client_pool().unihashes_exist(uncached_query) | 696 | with self.client_pool() as client_pool: |
| 697 | uncached_result = client_pool.unihashes_exist(uncached_query) | ||
| 658 | 698 | ||
| 659 | for key, exists in uncached_result.items(): | 699 | for key, exists in uncached_result.items(): |
| 660 | if exists: | 700 | if exists: |
| @@ -687,10 +727,12 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 687 | 727 | ||
| 688 | if self.max_parallel <= 1 or len(queries) <= 1: | 728 | if self.max_parallel <= 1 or len(queries) <= 1: |
| 689 | # No parallelism required. Make the query serially with the single client | 729 | # No parallelism required. Make the query serially with the single client |
| 690 | for tid, args in queries.items(): | 730 | with self.client() as client: |
| 691 | query_result[tid] = self.client().get_unihash(*args) | 731 | for tid, args in queries.items(): |
| 732 | query_result[tid] = client.get_unihash(*args) | ||
| 692 | else: | 733 | else: |
| 693 | query_result = self.client_pool().get_unihashes(queries) | 734 | with self.client_pool() as client_pool: |
| 735 | query_result = client_pool.get_unihashes(queries) | ||
| 694 | 736 | ||
| 695 | for tid, unihash in query_result.items(): | 737 | for tid, unihash in query_result.items(): |
| 696 | # In the absence of being able to discover a unique hash from the | 738 | # In the absence of being able to discover a unique hash from the |
| @@ -785,7 +827,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 785 | if tid in self.extramethod: | 827 | if tid in self.extramethod: |
| 786 | method = method + self.extramethod[tid] | 828 | method = method + self.extramethod[tid] |
| 787 | 829 | ||
| 788 | data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data) | 830 | with self.client() as client: |
| 831 | data = client.report_unihash(taskhash, method, outhash, unihash, extra_data) | ||
| 832 | |||
| 789 | new_unihash = data['unihash'] | 833 | new_unihash = data['unihash'] |
| 790 | 834 | ||
| 791 | if new_unihash != unihash: | 835 | if new_unihash != unihash: |
| @@ -816,7 +860,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 816 | if tid in self.extramethod: | 860 | if tid in self.extramethod: |
| 817 | method = method + self.extramethod[tid] | 861 | method = method + self.extramethod[tid] |
| 818 | 862 | ||
| 819 | data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | 863 | with self.client() as client: |
| 864 | data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | ||
| 865 | |||
| 820 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) | 866 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) |
| 821 | 867 | ||
| 822 | if data is None: | 868 | if data is None: |
