author    Joshua Watt <JPEWhacker@gmail.com>                    2020-06-05 22:15:30 -0500
committer Richard Purdie <richard.purdie@linuxfoundation.org>  2020-06-10 12:30:01 +0100
commit    f8163c22f46551f81fd8cb74856d021a46cc0d65 (patch)
tree      a0fdc2d88dc6e526f4defd4b4cb7603276bd5d25 /bitbake/lib/bb/cache.py
parent    b9fdb6a4261754459a01f9689010a38922fe0c8a (diff)
download  poky-f8163c22f46551f81fd8cb74856d021a46cc0d65.tar.gz
bitbake: bitbake: cache: Use multiconfig aware caches
Splits the parsing cache to maintain one cache per multiconfig instead
of one global cache. This is necessary now that the files and appends
can vary for each multiconfig. A dictionary-like proxy object,
bb.cache.MulticonfigCache, is created instead of a single
bb.cache.Cache object. The proxy creates and properly initializes a
bb.cache.Cache object for each multiconfig, and each of these caches
has a dedicated cache file whose name is based on the multiconfig.
(Bitbake rev: 5272f2489586479880ae8d046dfcdbe0963ee5bb)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
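
To make the dictionary-like behaviour concrete, here is a minimal usage
sketch, not code from this commit: the variable names (databuilder,
data_hash, caches_array) follow the constructor signature in the diff
below, the "mc1" key is invented, and the empty string is assumed to
name the default (non-multiconfig) configuration.

    # Hypothetical usage sketch, not code from this commit
    caches = bb.cache.MulticonfigCache(databuilder, data_hash, caches_array)

    default_cache = caches['']    # default configuration under the empty key
    if 'mc1' in caches:           # one bb.cache.Cache per multiconfig
        mc1_cache = caches['mc1']
    for mc in caches:             # Mapping interface: iterate over mc names
        cache = caches[mc]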
Diffstat (limited to 'bitbake/lib/bb/cache.py')
-rw-r--r--  bitbake/lib/bb/cache.py | 143
1 file changed, 108 insertions(+), 35 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index aa5ec5b591..954418384b 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,7 +19,7 @@
 import os
 import logging
 import pickle
-from collections import defaultdict
+from collections import defaultdict, Mapping
 import bb.utils
 import re
 
@@ -27,8 +27,11 @@ logger = logging.getLogger("BitBake.Cache")
 
 __cache_version__ = "152"
 
-def getCacheFile(path, filename, data_hash):
-    return os.path.join(path, filename + "." + data_hash)
+def getCacheFile(path, filename, mc, data_hash):
+    mcspec = ''
+    if mc:
+        mcspec = ".%s" % mc
+    return os.path.join(path, filename + mcspec + "." + data_hash)
 
 # RecipeInfoCommon defines common data retrieving methods
 # from meta data for caches. CoreRecipeInfo as well as other
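
The new getCacheFile helper simply splices the multiconfig name into
the cache file name. As a standalone illustration with invented inputs
(the "cache" directory, the hash "abc123" and the mc name "mc1" are all
made up):

    import os

    def getCacheFile(path, filename, mc, data_hash):
        mcspec = ''
        if mc:
            mcspec = ".%s" % mc
        return os.path.join(path, filename + mcspec + "." + data_hash)

    print(getCacheFile("cache", "bb_cache.dat", "", "abc123"))
    # cache/bb_cache.dat.abc123      (default configuration, name unchanged)
    print(getCacheFile("cache", "bb_cache.dat", "mc1", "abc123"))
    # cache/bb_cache.dat.mc1.abc123  (dedicated file for multiconfig "mc1")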
@@ -354,14 +357,14 @@ class Cache(NoCache):
     """
     BitBake Cache implementation
     """
-
-    def __init__(self, databuilder, data_hash, caches_array):
+    def __init__(self, databuilder, mc, data_hash, caches_array):
         super().__init__(databuilder)
         data = databuilder.data
 
         # Pass caches_array information into Cache Constructor
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
+        self.mc = mc
         self.caches_array = caches_array
         self.cachedir = data.getVar("CACHE")
         self.clean = set()
@@ -379,7 +382,17 @@
             return
 
         self.has_cache = True
-        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
+
+    def getCacheFile(self, cachefile):
+        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
+
+    def prepare_cache(self, progress):
+        if not self.has_cache:
+            return 0
+
+        loaded = 0
+
+        self.cachefile = self.getCacheFile("bb_cache.dat")
 
         logger.debug(1, "Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
@@ -387,18 +400,22 @@
         cache_ok = True
         if self.caches_array:
             for cache_class in self.caches_array:
-                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                cachefile = self.getCacheFile(cache_class.cachefile)
                 cache_ok = cache_ok and os.path.exists(cachefile)
                 cache_class.init_cacheData(self)
         if cache_ok:
-            self.load_cachefile()
+            loaded = self.load_cachefile(progress)
         elif os.path.isfile(self.cachefile):
             logger.info("Out of date cache found, rebuilding...")
         else:
             logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
 
         # We don't use the symlink, it's just for debugging convenience
-        symlink = os.path.join(self.cachedir, "bb_cache.dat")
+        if self.mc:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
+        else:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat")
+
         if os.path.exists(symlink):
             bb.utils.remove(symlink)
         try:
@@ -406,21 +423,30 @@
         except OSError:
             pass
 
-    def load_cachefile(self):
-        cachesize = 0
-        previous_progress = 0
-        previous_percent = 0
+        return loaded
+
+    def cachesize(self):
+        if not self.has_cache:
+            return 0
 
-        # Calculate the correct cachesize of all those cache files
+        cachesize = 0
         for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
-            with open(cachefile, "rb") as cachefile:
-                cachesize += os.fstat(cachefile.fileno()).st_size
+            cachefile = self.getCacheFile(cache_class.cachefile)
+            try:
+                with open(cachefile, "rb") as cachefile:
+                    cachesize += os.fstat(cachefile.fileno()).st_size
+            except FileNotFoundError:
+                pass
 
-        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+        return cachesize
+
+    def load_cachefile(self, progress):
+        cachesize = self.cachesize()
+        previous_progress = 0
+        previous_percent = 0
 
         for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            cachefile = self.getCacheFile(cache_class.cachefile)
             logger.debug(1, 'Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
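
This hunk changes the loading protocol: cachesize() now totals the
on-disk cache files (tolerating files that have not been written yet),
the caller fires the load events itself, and prepare_cache() takes a
progress callable that receives the number of bytes read so far. A
schematic of the resulting contract, simplified from the
MulticonfigCache constructor later in this patch:

    # Schematic only; the real driver is MulticonfigCache.__init__ below
    total = cache.cachesize()                        # bytes across all cache files
    bb.event.fire(bb.event.CacheLoadStarted(total), data)
    loaded = cache.prepare_cache(lambda done: None)  # callback receives bytes read
    bb.event.fire(bb.event.CacheLoadCompleted(total, loaded), data)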
@@ -460,23 +486,11 @@
                         self.depends_cache[key] = [value]
                     # only fire events on even percentage boundaries
                     current_progress = cachefile.tell() + previous_progress
-                    if current_progress > cachesize:
-                        # we might have calculated incorrect total size because a file
-                        # might've been written out just after we checked its size
-                        cachesize = current_progress
-                    current_percent = 100 * current_progress / cachesize
-                    if current_percent > previous_percent:
-                        previous_percent = current_percent
-                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
-                                      self.data)
+                    progress(cachefile.tell() + previous_progress)
 
             previous_progress += current_progress
 
-        # Note: depends cache number is corresponding to the parsing file numbers.
-        # The same file has several caches, still regarded as one item in the cache
-        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
-                                                  len(self.depends_cache)),
-                      self.data)
+        return len(self.depends_cache)
 
     def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
@@ -682,7 +696,7 @@
 
         for cache_class in self.caches_array:
             cache_class_name = cache_class.__name__
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            cachefile = self.getCacheFile(cache_class.cachefile)
             with open(cachefile, "wb") as f:
                 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                 p.dump(__cache_version__)
@@ -701,7 +715,7 @@
         return bb.parse.cached_mtime_noerror(cachefile)
 
     def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
-        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
+        if cacheData is not None and isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
             cacheData.add_from_recipeinfo(filename, info_array)
 
         if watcher:
@@ -727,6 +741,65 @@
             info_array.append(cache_class(realfn, data))
         self.add_info(file_name, info_array, cacheData, parsed)
 
+class MulticonfigCache(Mapping):
+    def __init__(self, databuilder, data_hash, caches_array):
+        def progress(p):
+            nonlocal current_progress
+            nonlocal previous_progress
+            nonlocal previous_percent
+            nonlocal cachesize
+
+            current_progress = previous_progress + p
+
+            if current_progress > cachesize:
+                # we might have calculated incorrect total size because a file
+                # might've been written out just after we checked its size
+                cachesize = current_progress
+            current_percent = 100 * current_progress / cachesize
+            if current_percent > previous_percent:
+                previous_percent = current_percent
+                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+                              databuilder.data)
+
+
+        cachesize = 0
+        current_progress = 0
+        previous_progress = 0
+        previous_percent = 0
+        self.__caches = {}
+
+        for mc, mcdata in databuilder.mcdata.items():
+            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
+
+            cachesize += self.__caches[mc].cachesize()
+
+        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
+        loaded = 0
+
+        for c in self.__caches.values():
+            loaded += c.prepare_cache(progress)
+            previous_progress = current_progress
+
+        # Note: depends cache number is corresponding to the parsing file numbers.
+        # The same file has several caches, still regarded as one item in the cache
+        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
+
+    def __len__(self):
+        return len(self.__caches)
+
+    def __getitem__(self, key):
+        return self.__caches[key]
+
+    def __contains__(self, key):
+        return key in self.__caches
+
+    def __iter__(self):
+        for k in self.__caches:
+            yield k
+
+    def keys(self):
+        return self.__caches.keys()
+
 
 def init(cooker):
     """