author     Chris Laplante <chris.laplante@agilent.com>    2024-11-25 12:22:04 -0500
committer  Steve Sakoman <steve@sakoman.com>              2024-11-30 05:41:59 -0800
commit     87e1004fc4d1274824b8ece09ea04876ac0126ca (patch)
tree       1a0c85264b6210cbf95f79fb0c7b4d3cb0333eb2 /bitbake
parent     d28c963ec40587cea2d82dde43f0612f0faef12f (diff)
download   poky-87e1004fc4d1274824b8ece09ea04876ac0126ca.tar.gz
bitbake: fetch2: use persist_data context managers
Python 3.13 emits a ResourceWarning for unclosed sqlite3 `Connection`s.
See https://docs.python.org/3/whatsnew/3.13.html#sqlite3
The previous commit fixed persist_data's context manager to close the
connection, but we were never actually using `with` in the first place.
This change is not necessary on 'master' because persist_data was
removed.
(Bitbake rev: 9602a684568910fd333ffce907fa020ad3661c26)
Signed-off-by: Chris Laplante <chris.laplante@agilent.com>
(cherry picked from commit 61f803c7d92a012b62837b0cdae4789a394b260e)
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Signed-off-by: Steve Sakoman <steve@sakoman.com>
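[Editor's note] The change depends on bb.persist_data.persist() returning an object that can be used as a context manager whose exit handler closes the underlying sqlite3 connection. Below is a minimal, self-contained sketch of that pattern for readers unfamiliar with the warning being fixed; it is illustrative only, not bitbake's persist_data implementation, and the HeadRevCache class, the revs table and the example URL are invented names.

# Sketch of a dict-like, sqlite3-backed cache that closes its connection on exit.
# Without the explicit close(), Python 3.13 emits a ResourceWarning when the
# Connection object is garbage-collected.
import sqlite3

class HeadRevCache:
    def __init__(self, path=":memory:"):
        self.conn = sqlite3.connect(path)
        self.conn.execute("CREATE TABLE IF NOT EXISTS revs (key TEXT PRIMARY KEY, value TEXT)")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.conn.close()   # deterministic close; no ResourceWarning at interpreter shutdown
        return False        # do not suppress exceptions raised in the with-body

    def __setitem__(self, key, value):
        with self.conn:     # sqlite3 connection as context manager == one transaction
            self.conn.execute("REPLACE INTO revs VALUES (?, ?)", (key, value))

    def __getitem__(self, key):
        row = self.conn.execute("SELECT value FROM revs WHERE key = ?", (key,)).fetchone()
        if row is None:
            raise KeyError(key)
        return row[0]

with HeadRevCache() as revs:
    revs["git://example.invalid/repo;branch=main"] = "0123abcd"
    print(revs["git://example.invalid/repo;branch=main"])

The `with ... as revs:` statements the patch adds to fetcher_init(), fetcher_compare_revisions() and latest_revision() rely on persist_data's real equivalent of this behaviour, fixed by the previous commit in the series.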
Diffstat (limited to 'bitbake')
-rw-r--r--   bitbake/lib/bb/fetch2/__init__.py   62
1 file changed, 31 insertions(+), 31 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 33a8086f27..8f0ed2b9e2 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -499,30 +499,30 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
 
-    revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-    try:
-        # fetcher_init is called multiple times, so make sure we only save the
-        # revs the first time it is called.
-        if not bb.fetch2.saved_headrevs:
-            bb.fetch2.saved_headrevs = dict(revs)
-    except:
-        pass
-
-    # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
-    if srcrev_policy == "cache":
-        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
-    elif srcrev_policy == "clear":
-        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
-        revs.clear()
-    else:
-        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+    with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+        try:
+            # fetcher_init is called multiple times, so make sure we only save the
+            # revs the first time it is called.
+            if not bb.fetch2.saved_headrevs:
+                bb.fetch2.saved_headrevs = dict(revs)
+        except:
+            pass
+
+        # When to drop SCM head revisions controlled by user policy
+        srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+        if srcrev_policy == "cache":
+            logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        elif srcrev_policy == "clear":
+            logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+            revs.clear()
+        else:
+            raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
 
     _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
             m.init(d)
 
 def fetcher_parse_save():
     _checksum_cache.save_extras()
@@ -536,8 +536,8 @@ def fetcher_compare_revisions(d):
     when bitbake was started and return true if they have changed.
     """
 
-    headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
-    return headrevs != bb.fetch2.saved_headrevs
+    with dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) as headrevs:
+        return headrevs != bb.fetch2.saved_headrevs
 
 def mirror_from_string(data):
     mirrors = (data or "").replace('\\n',' ').split()
@@ -1662,13 +1662,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-        key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+        with bb.persist_data.persist('BB_URI_HEADREVS', d) as revs:
+            key = self.generate_revision_key(ud, d, name)
+            try:
+                return revs[key]
+            except KeyError:
+                revs[key] = rev = self._latest_revision(ud, d, name)
+                return rev
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)
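[Editor's note] For completeness: a plain `with sqlite3.connect(...)` block only wraps a transaction (commit or rollback) and does not close the connection, which is why an explicit close in __exit__, or contextlib.closing, is what actually silences the warning. A minimal standard-library-only sketch of the closing idiom, assuming nothing about bitbake's code:

# contextlib.closing() turns any object with a close() method into a context
# manager, guaranteeing the sqlite3 connection is closed even if the body raises.
import contextlib
import sqlite3

with contextlib.closing(sqlite3.connect(":memory:")) as conn:
    conn.execute("CREATE TABLE revs (key TEXT PRIMARY KEY, value TEXT)")
    conn.execute("INSERT INTO revs VALUES (?, ?)", ("example-key", "example-rev"))
    print(conn.execute("SELECT value FROM revs WHERE key = ?", ("example-key",)).fetchone()[0])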