diff options
-rw-r--r--  bitbake/lib/bb/cache.py            | 10
-rw-r--r--  bitbake/lib/bb/checksum.py         | 25
-rw-r--r--  bitbake/lib/bb/cookerdata.py       |  5
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  | 33
4 files changed, 58 insertions, 15 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 958652e0e3..ec7b023fc7 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
| @@ -847,6 +847,16 @@ class MultiProcessCache(object): | |||
| 847 | data = [{}] | 847 | data = [{}] |
| 848 | return data | 848 | return data |
| 849 | 849 | ||
| 850 | def clear_cache(self): | ||
| 851 | if not self.cachefile: | ||
| 852 | bb.fatal("Can't clear invalid cachefile") | ||
| 853 | |||
| 854 | self.cachedata = self.create_cachedata() | ||
| 855 | self.cachedata_extras = self.create_cachedata() | ||
| 856 | with bb.utils.fileslocked([self.cachefile + ".lock"]): | ||
| 857 | bb.utils.remove(self.cachefile) | ||
| 858 | bb.utils.remove(self.cachefile + "-*") | ||
| 859 | |||
| 850 | def save_extras(self): | 860 | def save_extras(self): |
| 851 | if not self.cachefile: | 861 | if not self.cachefile: |
| 852 | return | 862 | return |
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py index 557793d366..3fb39a303e 100644 --- a/bitbake/lib/bb/checksum.py +++ b/bitbake/lib/bb/checksum.py | |||
| @@ -142,3 +142,28 @@ class FileChecksumCache(MultiProcessCache): | |||
| 142 | 142 | ||
| 143 | checksums.sort(key=operator.itemgetter(1)) | 143 | checksums.sort(key=operator.itemgetter(1)) |
| 144 | return checksums | 144 | return checksums |
| 145 | |||
| 146 | class RevisionsCache(MultiProcessCache): | ||
| 147 | cache_file_name = "local_srcrevisions.dat" | ||
| 148 | CACHE_VERSION = 1 | ||
| 149 | |||
| 150 | def __init__(self): | ||
| 151 | MultiProcessCache.__init__(self) | ||
| 152 | |||
| 153 | def get_revs(self): | ||
| 154 | return self.cachedata[0] | ||
| 155 | |||
| 156 | def get_rev(self, k): | ||
| 157 | if k in self.cachedata_extras[0]: | ||
| 158 | return self.cachedata_extras[0][k] | ||
| 159 | if k in self.cachedata[0]: | ||
| 160 | return self.cachedata[0][k] | ||
| 161 | return None | ||
| 162 | |||
| 163 | def set_rev(self, k, v): | ||
| 164 | self.cachedata[0][k] = v | ||
| 165 | self.cachedata_extras[0][k] = v | ||
| 166 | |||
| 167 | def merge_data(self, source, dest): | ||
| 168 | for h in source[0]: | ||
| 169 | dest[0][h] = source[0][h] | ||
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py index 3ad5cf3dd0..1f447d30c2 100644 --- a/bitbake/lib/bb/cookerdata.py +++ b/bitbake/lib/bb/cookerdata.py | |||
| @@ -1,3 +1,4 @@ | |||
| 1 | |||
| 1 | # | 2 | # |
| 2 | # Copyright (C) 2003, 2004 Chris Larson | 3 | # Copyright (C) 2003, 2004 Chris Larson |
| 3 | # Copyright (C) 2003, 2004 Phil Blundell | 4 | # Copyright (C) 2003, 2004 Phil Blundell |
| @@ -267,8 +268,8 @@ class CookerDataBuilder(object): | |||
| 267 | try: | 268 | try: |
| 268 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) | 269 | self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) |
| 269 | 270 | ||
| 270 | if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker: | 271 | servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker |
| 271 | bb.fetch.fetcher_init(self.data) | 272 | bb.fetch.fetcher_init(self.data, servercontext) |
| 272 | bb.parse.init_parser(self.data) | 273 | bb.parse.init_parser(self.data) |
| 273 | 274 | ||
| 274 | bb.event.fire(bb.event.ConfigParsed(), self.data) | 275 | bb.event.fire(bb.event.ConfigParsed(), self.data) |
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index b194a79be9..add742bfad 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
| @@ -23,13 +23,14 @@ import collections | |||
| 23 | import subprocess | 23 | import subprocess |
| 24 | import pickle | 24 | import pickle |
| 25 | import errno | 25 | import errno |
| 26 | import bb.persist_data, bb.utils | 26 | import bb.utils |
| 27 | import bb.checksum | 27 | import bb.checksum |
| 28 | import bb.process | 28 | import bb.process |
| 29 | import bb.event | 29 | import bb.event |
| 30 | 30 | ||
| 31 | __version__ = "2" | 31 | __version__ = "2" |
| 32 | _checksum_cache = bb.checksum.FileChecksumCache() | 32 | _checksum_cache = bb.checksum.FileChecksumCache() |
| 33 | _revisions_cache = bb.checksum.RevisionsCache() | ||
| 33 | 34 | ||
| 34 | logger = logging.getLogger("BitBake.Fetcher") | 35 | logger = logging.getLogger("BitBake.Fetcher") |
| 35 | 36 | ||
| @@ -493,18 +494,23 @@ methods = [] | |||
| 493 | urldata_cache = {} | 494 | urldata_cache = {} |
| 494 | saved_headrevs = {} | 495 | saved_headrevs = {} |
| 495 | 496 | ||
| 496 | def fetcher_init(d): | 497 | def fetcher_init(d, servercontext=True): |
| 497 | """ | 498 | """ |
| 498 | Called to initialize the fetchers once the configuration data is known. | 499 | Called to initialize the fetchers once the configuration data is known. |
| 499 | Calls before this must not hit the cache. | 500 | Calls before this must not hit the cache. |
| 500 | """ | 501 | """ |
| 501 | 502 | ||
| 502 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | 503 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) |
| 504 | _revisions_cache.init_cache(d.getVar("BB_CACHEDIR")) | ||
| 505 | |||
| 506 | if not servercontext: | ||
| 507 | return | ||
| 508 | |||
| 503 | try: | 509 | try: |
| 504 | # fetcher_init is called multiple times, so make sure we only save the | 510 | # fetcher_init is called multiple times, so make sure we only save the |
| 505 | # revs the first time it is called. | 511 | # revs the first time it is called. |
| 506 | if not bb.fetch2.saved_headrevs: | 512 | if not bb.fetch2.saved_headrevs: |
| 507 | bb.fetch2.saved_headrevs = dict(revs) | 513 | bb.fetch2.saved_headrevs = _revisions_cache.get_revs() |
| 508 | except: | 514 | except: |
| 509 | pass | 515 | pass |
| 510 | 516 | ||
| @@ -514,11 +520,10 @@ def fetcher_init(d): | |||
| 514 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | 520 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
| 515 | elif srcrev_policy == "clear": | 521 | elif srcrev_policy == "clear": |
| 516 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | 522 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) |
| 517 | revs.clear() | 523 | _revisions_cache.clear_cache() |
| 518 | else: | 524 | else: |
| 519 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | 525 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
| 520 | 526 | ||
| 521 | _checksum_cache.init_cache(d.getVar("BB_CACHEDIR")) | ||
| 522 | 527 | ||
| 523 | for m in methods: | 528 | for m in methods: |
| 524 | if hasattr(m, "init"): | 529 | if hasattr(m, "init"): |
| @@ -526,9 +531,11 @@ def fetcher_init(d): | |||
| 526 | 531 | ||
| 527 | def fetcher_parse_save(): | 532 | def fetcher_parse_save(): |
| 528 | _checksum_cache.save_extras() | 533 | _checksum_cache.save_extras() |
| 534 | _revisions_cache.save_extras() | ||
| 529 | 535 | ||
| 530 | def fetcher_parse_done(): | 536 | def fetcher_parse_done(): |
| 531 | _checksum_cache.save_merge() | 537 | _checksum_cache.save_merge() |
| 538 | _revisions_cache.save_merge() | ||
| 532 | 539 | ||
| 533 | def fetcher_compare_revisions(d): | 540 | def fetcher_compare_revisions(d): |
| 534 | """ | 541 | """ |
| @@ -536,7 +543,7 @@ def fetcher_compare_revisions(d): | |||
| 536 | when bitbake was started and return true if they have changed. | 543 | when bitbake was started and return true if they have changed. |
| 537 | """ | 544 | """ |
| 538 | 545 | ||
| 539 | headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) | 546 | headrevs = _revisions_cache.get_revs() |
| 540 | return headrevs != bb.fetch2.saved_headrevs | 547 | return headrevs != bb.fetch2.saved_headrevs |
| 541 | 548 | ||
| 542 | def mirror_from_string(data): | 549 | def mirror_from_string(data): |
| @@ -1662,13 +1669,13 @@ class FetchMethod(object): | |||
| 1662 | if not hasattr(self, "_latest_revision"): | 1669 | if not hasattr(self, "_latest_revision"): |
| 1663 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) | 1670 | raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) |
| 1664 | 1671 | ||
| 1665 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | ||
| 1666 | key = self.generate_revision_key(ud, d, name) | 1672 | key = self.generate_revision_key(ud, d, name) |
| 1667 | try: | 1673 | |
| 1668 | return revs[key] | 1674 | rev = _revisions_cache.get_rev(key) |
| 1669 | except KeyError: | 1675 | if rev is None: |
| 1670 | revs[key] = rev = self._latest_revision(ud, d, name) | 1676 | rev = self._latest_revision(ud, d, name) |
| 1671 | return rev | 1677 | _revisions_cache.set_rev(key, rev) |
| 1678 | return rev | ||
| 1672 | 1679 | ||
| 1673 | def sortable_revision(self, ud, d, name): | 1680 | def sortable_revision(self, ud, d, name): |
| 1674 | latest_rev = self._build_revision(ud, d, name) | 1681 | latest_rev = self._build_revision(ud, d, name) |
