Diffstat (limited to 'bitbake')

 bitbake/lib/bb/cache.py  | 143
 bitbake/lib/bb/cooker.py |  42
 2 files changed, 133 insertions(+), 52 deletions(-)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index aa5ec5b591..954418384b 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,7 +19,7 @@
 import os
 import logging
 import pickle
-from collections import defaultdict
+from collections import defaultdict, Mapping
 import bb.utils
 import re
 
@@ -27,8 +27,11 @@ logger = logging.getLogger("BitBake.Cache")
 
 __cache_version__ = "152"
 
-def getCacheFile(path, filename, data_hash):
-    return os.path.join(path, filename + "." + data_hash)
+def getCacheFile(path, filename, mc, data_hash):
+    mcspec = ''
+    if mc:
+        mcspec = ".%s" % mc
+    return os.path.join(path, filename + mcspec + "." + data_hash)
 
 # RecipeInfoCommon defines common data retrieving methods
 # from meta data for caches. CoreRecipeInfo as well as other
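The hunk above extends getCacheFile() so that each multiconfig gets its own cache file. A minimal standalone sketch of the resulting naming; the path, hash value, and the "qemuarm" multiconfig name are illustrative, not taken from the commit:

import os

def getCacheFile(path, filename, mc, data_hash):
    # Same logic as the hunk above: a ".<mc>" infix separates per-multiconfig
    # cache files; the default config (empty mc) gets no infix.
    mcspec = ''
    if mc:
        mcspec = ".%s" % mc
    return os.path.join(path, filename + mcspec + "." + data_hash)

print(getCacheFile("/tmp/cache", "bb_cache.dat", "", "abc123"))
# -> /tmp/cache/bb_cache.dat.abc123
print(getCacheFile("/tmp/cache", "bb_cache.dat", "qemuarm", "abc123"))
# -> /tmp/cache/bb_cache.dat.qemuarm.abc123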
@@ -354,14 +357,14 @@ class Cache(NoCache):
     """
     BitBake Cache implementation
     """
-
-    def __init__(self, databuilder, data_hash, caches_array):
+    def __init__(self, databuilder, mc, data_hash, caches_array):
         super().__init__(databuilder)
         data = databuilder.data
 
         # Pass caches_array information into Cache Constructor
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
+        self.mc = mc
         self.caches_array = caches_array
         self.cachedir = data.getVar("CACHE")
         self.clean = set()
@@ -379,7 +382,17 @@
             return
 
         self.has_cache = True
-        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
+
+    def getCacheFile(self, cachefile):
+        return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
+
+    def prepare_cache(self, progress):
+        if not self.has_cache:
+            return 0
+
+        loaded = 0
+
+        self.cachefile = self.getCacheFile("bb_cache.dat")
 
         logger.debug(1, "Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
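The hunk above splits cache setup in two: __init__ now only records configuration, while the new prepare_cache(progress) does the filesystem work and returns the number of entries it loaded. A toy sketch of that two-phase shape; the class, file names, and entry data are invented for illustration and are not BitBake API:

class TwoPhaseCache:
    def __init__(self, name):
        # Cheap phase: record settings only, no filesystem work yet.
        self.name = name
        self.entries = {}

    def prepare(self, progress):
        # Expensive phase: load entries, reporting positions through the
        # caller-supplied callback, then return how many were loaded.
        for pos, key in enumerate(("a.bb", "b.bb", "c.bb"), start=1):
            self.entries[key] = None
            progress(pos)
        return len(self.entries)

cache = TwoPhaseCache("default")
loaded = cache.prepare(progress=lambda pos: None)  # caller decides what progress means
print(loaded)  # 3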
@@ -387,18 +400,22 @@
         cache_ok = True
         if self.caches_array:
             for cache_class in self.caches_array:
-                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+                cachefile = self.getCacheFile(cache_class.cachefile)
                 cache_ok = cache_ok and os.path.exists(cachefile)
                 cache_class.init_cacheData(self)
         if cache_ok:
-            self.load_cachefile()
+            loaded = self.load_cachefile(progress)
         elif os.path.isfile(self.cachefile):
             logger.info("Out of date cache found, rebuilding...")
         else:
             logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
 
         # We don't use the symlink, it's just for debugging convenience
-        symlink = os.path.join(self.cachedir, "bb_cache.dat")
+        if self.mc:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
+        else:
+            symlink = os.path.join(self.cachedir, "bb_cache.dat")
+
         if os.path.exists(symlink):
             bb.utils.remove(symlink)
         try:
@@ -406,21 +423,30 @@
         except OSError:
             pass
 
-    def load_cachefile(self):
-        cachesize = 0
-        previous_progress = 0
-        previous_percent = 0
+        return loaded
+
+    def cachesize(self):
+        if not self.has_cache:
+            return 0
 
-        # Calculate the correct cachesize of all those cache files
-        for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
-            with open(cachefile, "rb") as cachefile:
-                cachesize += os.fstat(cachefile.fileno()).st_size
+        cachesize = 0
+        for cache_class in self.caches_array:
+            cachefile = self.getCacheFile(cache_class.cachefile)
+            try:
+                with open(cachefile, "rb") as cachefile:
+                    cachesize += os.fstat(cachefile.fileno()).st_size
+            except FileNotFoundError:
+                pass
 
-        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+        return cachesize
+
+    def load_cachefile(self, progress):
+        cachesize = self.cachesize()
+        previous_progress = 0
+        previous_percent = 0
 
         for cache_class in self.caches_array:
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            cachefile = self.getCacheFile(cache_class.cachefile)
             logger.debug(1, 'Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
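The new cachesize() helper tolerates cache files that have not been written yet: a missing file contributes zero instead of aborting the whole sum. The same pattern in a self-contained sketch; the file names are illustrative:

import os

def total_size(paths):
    total = 0
    for p in paths:
        try:
            # fstat on the open handle mirrors the hunk above and avoids a
            # race between a separate exists() check and the open().
            with open(p, "rb") as f:
                total += os.fstat(f.fileno()).st_size
        except FileNotFoundError:
            pass  # cache not written yet; treat as empty
    return total

print(total_size(["bb_cache.dat.abc123", "bb_extra_cache.dat.abc123"]))  # 0 if neither exists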
@@ -460,23 +486,11 @@
                         self.depends_cache[key] = [value]
                     # only fire events on even percentage boundaries
                     current_progress = cachefile.tell() + previous_progress
-                    if current_progress > cachesize:
-                        # we might have calculated incorrect total size because a file
-                        # might've been written out just after we checked its size
-                        cachesize = current_progress
-                    current_percent = 100 * current_progress / cachesize
-                    if current_percent > previous_percent:
-                        previous_percent = current_percent
-                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
-                                      self.data)
+                    progress(cachefile.tell() + previous_progress)
 
             previous_progress += current_progress
 
-        # Note: depends cache number is corresponding to the parsing file numbers.
-        # The same file has several caches, still regarded as one item in the cache
-        bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
-                                                  len(self.depends_cache)),
-                      self.data)
+        return len(self.depends_cache)
 
     def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
@@ -682,7 +696,7 @@
 
         for cache_class in self.caches_array:
             cache_class_name = cache_class.__name__
-            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+            cachefile = self.getCacheFile(cache_class.cachefile)
             with open(cachefile, "wb") as f:
                 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                 p.dump(__cache_version__)
@@ -701,7 +715,7 @@
         return bb.parse.cached_mtime_noerror(cachefile)
 
     def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
-        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
+        if cacheData is not None and isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
             cacheData.add_from_recipeinfo(filename, info_array)
 
         if watcher:
@@ -727,6 +741,65 @@
                 info_array.append(cache_class(realfn, data))
             self.add_info(file_name, info_array, cacheData, parsed)
 
+class MulticonfigCache(Mapping):
+    def __init__(self, databuilder, data_hash, caches_array):
+        def progress(p):
+            nonlocal current_progress
+            nonlocal previous_progress
+            nonlocal previous_percent
+            nonlocal cachesize
+
+            current_progress = previous_progress + p
+
+            if current_progress > cachesize:
+                # we might have calculated incorrect total size because a file
+                # might've been written out just after we checked its size
+                cachesize = current_progress
+            current_percent = 100 * current_progress / cachesize
+            if current_percent > previous_percent:
+                previous_percent = current_percent
+                bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+                              databuilder.data)
+
+
+        cachesize = 0
+        current_progress = 0
+        previous_progress = 0
+        previous_percent = 0
+        self.__caches = {}
+
+        for mc, mcdata in databuilder.mcdata.items():
+            self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
+
+            cachesize += self.__caches[mc].cachesize()
+
+        bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
+        loaded = 0
+
+        for c in self.__caches.values():
+            loaded += c.prepare_cache(progress)
+            previous_progress = current_progress
+
+        # Note: the cache entry count corresponds to the number of files parsed;
+        # a file that contributes to several caches still counts as one item.
+        bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
+
+    def __len__(self):
+        return len(self.__caches)
+
+    def __getitem__(self, key):
+        return self.__caches[key]
+
+    def __contains__(self, key):
+        return key in self.__caches
+
+    def __iter__(self):
+        for k in self.__caches:
+            yield k
+
+    def keys(self):
+        return self.__caches.keys()
+
 
 def init(cooker):
     """
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 8f45233c8d..50526d52b2 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -541,8 +541,8 @@
 
         if fn:
             try:
-                bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
-                envdata = bb_cache.loadDataFull(fn, self.collections[mc].get_file_appends(fn))
+                bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
+                envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn))
             except Exception as e:
                 parselog.exception("Unable to read %s", fn)
                 raise
@@ -1328,9 +1328,9 @@
         self.buildSetVars()
         self.reset_mtime_caches()
 
-        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
+        bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
 
-        infos = bb_cache.parse(fn, self.collections[mc].get_file_appends(fn))
+        infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn))
         infos = dict(infos)
 
         fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1968,7 +1968,7 @@
             except queue.Full:
                 pending.append(result)
 
-    def parse(self, filename, appends):
+    def parse(self, mc, cache, filename, appends):
         try:
             origfilter = bb.event.LogHandler.filter
             # Record the filename we're parsing into any events generated
@@ -1982,7 +1982,7 @@
             bb.event.set_class_handlers(self.handlers.copy())
             bb.event.LogHandler.filter = parse_filter
 
-            return True, self.bb_cache.parse(filename, appends)
+            return True, mc, cache.parse(filename, appends)
         except Exception as exc:
             tb = sys.exc_info()[2]
             exc.recipe = filename
@@ -2016,16 +2016,16 @@
         self.current = 0
         self.process_names = []
 
-        self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
+        self.bb_caches = bb.cache.MulticonfigCache(self.cfgbuilder, self.cfghash, cooker.caches_array)
         self.fromcache = set()
         self.willparse = set()
         for mc in self.cooker.multiconfigs:
             for filename in self.mcfilelist[mc]:
                 appends = self.cooker.collections[mc].get_file_appends(filename)
-                if not self.bb_cache.cacheValid(filename, appends):
-                    self.willparse.add((filename, appends))
+                if not self.bb_caches[mc].cacheValid(filename, appends):
+                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
                 else:
-                    self.fromcache.add((filename, appends))
+                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
 
         self.total = len(self.fromcache) + len(self.willparse)
         self.toparse = len(self.willparse)
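With willparse and fromcache now carrying (mc, cache, filename, appends) tuples, each work item is self-describing and the shared Parser.bb_cache class attribute can go away (next hunk). Roughly how the dispatch pairs up, using stub objects in place of real caches and appends:

class StubCache:
    def __init__(self, mc):
        self.mc = mc

    def parse(self, filename, appends):
        return "parsed %s under mc=%r" % (filename, self.mc)

caches = {"": StubCache(""), "qemuarm": StubCache("qemuarm")}
willparse = {
    ("", caches[""], "recipe-a.bb", ()),
    ("qemuarm", caches["qemuarm"], "recipe-b.bb", ()),
}

for mc, cache, filename, appends in sorted(willparse):
    # A worker can now parse with the right per-multiconfig cache directly.
    print(cache.parse(filename, appends))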
@@ -2043,7 +2043,6 @@
         if self.toparse:
             bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
             def init():
-                Parser.bb_cache = self.bb_cache
                 bb.utils.set_process_name(multiprocessing.current_process().name)
                 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
@@ -2099,7 +2098,11 @@
             else:
                 process.join()
 
-        sync = threading.Thread(target=self.bb_cache.sync)
+        def sync_caches():
+            for c in self.bb_caches.values():
+                c.sync()
+
+        sync = threading.Thread(target=sync_caches)
         sync.start()
         multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
         bb.codeparser.parser_cache_savemerge()
@@ -2116,8 +2119,8 @@
             print("Processed parsing statistics saved to %s" % (pout))
 
     def load_cached(self):
-        for mc, filename, appends in self.fromcache:
-            cached, infos = self.bb_cache.load(mc, filename, appends)
+        for mc, cache, filename, appends in self.fromcache:
+            cached, infos = cache.load(filename, appends)
             yield not cached, mc, infos
 
     def parse_generator(self):
@@ -2196,8 +2199,13 @@
             if info_array[0].skipped:
                 self.skipped += 1
                 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
-            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
-            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
+            (fn, cls, fnmc) = bb.cache.virtualfn2realfn(virtualfn)
+
+            if fnmc == mc:
+                cache = self.cooker.recipecaches[mc]
+            else:
+                cache = None
+            self.bb_caches[mc].add_info(virtualfn, info_array, cache,
                                         parsed=parsed, watcher = self.cooker.add_filewatch)
             return True
 
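The fnmc == mc guard above handles virtual filenames whose multiconfig differs from the one being parsed: add_info() is then called with cache=None, which (per the cache.py hunk that made add_info None-tolerant) records the parse result without adding the recipe to the wrong recipecache. The selection logic isolated in a sketch, with a placeholder recipecache dict:

def pick_recipecache(recipecaches, mc, fnmc):
    # Only hand back a recipecache when the virtual filename's multiconfig
    # matches the one being parsed; otherwise None, so add_info() skips its
    # add_from_recipeinfo() call (see the cacheData guard above).
    return recipecaches[mc] if fnmc == mc else None

recipecaches = {"": "default recipecache"}
print(pick_recipecache(recipecaches, "", ""))         # 'default recipecache'
print(pick_recipecache(recipecaches, "", "qemuarm"))  # None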
@@ -2207,6 +2215,6 @@
                 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
 
         for mc, filename, appends in to_reparse:
-            infos = self.bb_cache.parse(filename, appends)
+            infos = self.bb_caches[mc].parse(filename, appends)
             for vfn, info_array in infos:
                 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)