diff options
author | Joshua Watt <JPEWhacker@gmail.com> | 2020-06-05 22:15:33 -0500 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2020-06-10 12:30:01 +0100 |
commit | 3ec9d5774c56eec42ee6aa5395838a82d3a88bf0 (patch) | |
tree | 8bee98337d3307f1c2b91b606eec08bd6fee2f5f /bitbake | |
parent | 7dffeb6ffd82e0cee1ab761ab0f2b0415bddb349 (diff) | |
download | poky-3ec9d5774c56eec42ee6aa5395838a82d3a88bf0.tar.gz |
bitbake: bitbake: cache: Improve logging
Improves the logging of Cache objects by prefixing the log messages with
the multiconfig name of the cache, so as to distinguish between multiple
instances of the class. Also adds more log messages.
(Bitbake rev: 74fd10b33c66f4142d6eff6531200f7620a06ae0)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake')
-rw-r--r-- | bitbake/lib/bb/cache.py | 65 |
1 file changed, 35 insertions, 30 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py index 954418384b..b34bfa9b5a 100644 --- a/bitbake/lib/bb/cache.py +++ b/bitbake/lib/bb/cache.py | |||
@@ -21,6 +21,7 @@ import logging | |||
21 | import pickle | 21 | import pickle |
22 | from collections import defaultdict, Mapping | 22 | from collections import defaultdict, Mapping |
23 | import bb.utils | 23 | import bb.utils |
24 | from bb import PrefixLoggerAdapter | ||
24 | import re | 25 | import re |
25 | 26 | ||
26 | logger = logging.getLogger("BitBake.Cache") | 27 | logger = logging.getLogger("BitBake.Cache") |
@@ -365,6 +366,7 @@ class Cache(NoCache): | |||
365 | # It will be used later for deciding whether we | 366 | # It will be used later for deciding whether we |
366 | # need extra cache file dump/load support | 367 | # need extra cache file dump/load support |
367 | self.mc = mc | 368 | self.mc = mc |
369 | self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) | ||
368 | self.caches_array = caches_array | 370 | self.caches_array = caches_array |
369 | self.cachedir = data.getVar("CACHE") | 371 | self.cachedir = data.getVar("CACHE") |
370 | self.clean = set() | 372 | self.clean = set() |
@@ -377,8 +379,8 @@ class Cache(NoCache): | |||
377 | 379 | ||
378 | if self.cachedir in [None, '']: | 380 | if self.cachedir in [None, '']: |
379 | self.has_cache = False | 381 | self.has_cache = False |
380 | logger.info("Not using a cache. " | 382 | self.logger.info("Not using a cache. " |
381 | "Set CACHE = <directory> to enable.") | 383 | "Set CACHE = <directory> to enable.") |
382 | return | 384 | return |
383 | 385 | ||
384 | self.has_cache = True | 386 | self.has_cache = True |
@@ -394,21 +396,23 @@ class Cache(NoCache): | |||
394 | 396 | ||
395 | self.cachefile = self.getCacheFile("bb_cache.dat") | 397 | self.cachefile = self.getCacheFile("bb_cache.dat") |
396 | 398 | ||
397 | logger.debug(1, "Cache dir: %s", self.cachedir) | 399 | self.logger.debug(1, "Cache dir: %s", self.cachedir) |
398 | bb.utils.mkdirhier(self.cachedir) | 400 | bb.utils.mkdirhier(self.cachedir) |
399 | 401 | ||
400 | cache_ok = True | 402 | cache_ok = True |
401 | if self.caches_array: | 403 | if self.caches_array: |
402 | for cache_class in self.caches_array: | 404 | for cache_class in self.caches_array: |
403 | cachefile = self.getCacheFile(cache_class.cachefile) | 405 | cachefile = self.getCacheFile(cache_class.cachefile) |
404 | cache_ok = cache_ok and os.path.exists(cachefile) | 406 | cache_exists = os.path.exists(cachefile) |
407 | self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists) | ||
408 | cache_ok = cache_ok and cache_exists | ||
405 | cache_class.init_cacheData(self) | 409 | cache_class.init_cacheData(self) |
406 | if cache_ok: | 410 | if cache_ok: |
407 | loaded = self.load_cachefile(progress) | 411 | loaded = self.load_cachefile(progress) |
408 | elif os.path.isfile(self.cachefile): | 412 | elif os.path.isfile(self.cachefile): |
409 | logger.info("Out of date cache found, rebuilding...") | 413 | self.logger.info("Out of date cache found, rebuilding...") |
410 | else: | 414 | else: |
411 | logger.debug(1, "Cache file %s not found, building..." % self.cachefile) | 415 | self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile) |
412 | 416 | ||
413 | # We don't use the symlink, its just for debugging convinience | 417 | # We don't use the symlink, its just for debugging convinience |
414 | if self.mc: | 418 | if self.mc: |
@@ -447,7 +451,7 @@ class Cache(NoCache): | |||
447 | 451 | ||
448 | for cache_class in self.caches_array: | 452 | for cache_class in self.caches_array: |
449 | cachefile = self.getCacheFile(cache_class.cachefile) | 453 | cachefile = self.getCacheFile(cache_class.cachefile) |
450 | logger.debug(1, 'Loading cache file: %s' % cachefile) | 454 | self.logger.debug(1, 'Loading cache file: %s' % cachefile) |
451 | with open(cachefile, "rb") as cachefile: | 455 | with open(cachefile, "rb") as cachefile: |
452 | pickled = pickle.Unpickler(cachefile) | 456 | pickled = pickle.Unpickler(cachefile) |
453 | # Check cache version information | 457 | # Check cache version information |
@@ -455,14 +459,14 @@ class Cache(NoCache): | |||
455 | cache_ver = pickled.load() | 459 | cache_ver = pickled.load() |
456 | bitbake_ver = pickled.load() | 460 | bitbake_ver = pickled.load() |
457 | except Exception: | 461 | except Exception: |
458 | logger.info('Invalid cache, rebuilding...') | 462 | self.logger.info('Invalid cache, rebuilding...') |
459 | return | 463 | return |
460 | 464 | ||
461 | if cache_ver != __cache_version__: | 465 | if cache_ver != __cache_version__: |
462 | logger.info('Cache version mismatch, rebuilding...') | 466 | self.logger.info('Cache version mismatch, rebuilding...') |
463 | return | 467 | return |
464 | elif bitbake_ver != bb.__version__: | 468 | elif bitbake_ver != bb.__version__: |
465 | logger.info('Bitbake version mismatch, rebuilding...') | 469 | self.logger.info('Bitbake version mismatch, rebuilding...') |
466 | return | 470 | return |
467 | 471 | ||
468 | # Load the rest of the cache file | 472 | # Load the rest of the cache file |
@@ -494,7 +498,7 @@ class Cache(NoCache): | |||
494 | 498 | ||
495 | def parse(self, filename, appends): | 499 | def parse(self, filename, appends): |
496 | """Parse the specified filename, returning the recipe information""" | 500 | """Parse the specified filename, returning the recipe information""" |
497 | logger.debug(1, "Parsing %s", filename) | 501 | self.logger.debug(1, "Parsing %s", filename) |
498 | infos = [] | 502 | infos = [] |
499 | datastores = self.load_bbfile(filename, appends) | 503 | datastores = self.load_bbfile(filename, appends) |
500 | depends = [] | 504 | depends = [] |
@@ -548,7 +552,7 @@ class Cache(NoCache): | |||
548 | cached, infos = self.load(fn, appends) | 552 | cached, infos = self.load(fn, appends) |
549 | for virtualfn, info_array in infos: | 553 | for virtualfn, info_array in infos: |
550 | if info_array[0].skipped: | 554 | if info_array[0].skipped: |
551 | logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason) | 555 | self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason) |
552 | skipped += 1 | 556 | skipped += 1 |
553 | else: | 557 | else: |
554 | self.add_info(virtualfn, info_array, cacheData, not cached) | 558 | self.add_info(virtualfn, info_array, cacheData, not cached) |
@@ -584,21 +588,21 @@ class Cache(NoCache): | |||
584 | 588 | ||
585 | # File isn't in depends_cache | 589 | # File isn't in depends_cache |
586 | if not fn in self.depends_cache: | 590 | if not fn in self.depends_cache: |
587 | logger.debug(2, "Cache: %s is not cached", fn) | 591 | self.logger.debug(2, "%s is not cached", fn) |
588 | return False | 592 | return False |
589 | 593 | ||
590 | mtime = bb.parse.cached_mtime_noerror(fn) | 594 | mtime = bb.parse.cached_mtime_noerror(fn) |
591 | 595 | ||
592 | # Check file still exists | 596 | # Check file still exists |
593 | if mtime == 0: | 597 | if mtime == 0: |
594 | logger.debug(2, "Cache: %s no longer exists", fn) | 598 | self.logger.debug(2, "%s no longer exists", fn) |
595 | self.remove(fn) | 599 | self.remove(fn) |
596 | return False | 600 | return False |
597 | 601 | ||
598 | info_array = self.depends_cache[fn] | 602 | info_array = self.depends_cache[fn] |
599 | # Check the file's timestamp | 603 | # Check the file's timestamp |
600 | if mtime != info_array[0].timestamp: | 604 | if mtime != info_array[0].timestamp: |
601 | logger.debug(2, "Cache: %s changed", fn) | 605 | self.logger.debug(2, "%s changed", fn) |
602 | self.remove(fn) | 606 | self.remove(fn) |
603 | return False | 607 | return False |
604 | 608 | ||
@@ -609,14 +613,14 @@ class Cache(NoCache): | |||
609 | fmtime = bb.parse.cached_mtime_noerror(f) | 613 | fmtime = bb.parse.cached_mtime_noerror(f) |
610 | # Check if file still exists | 614 | # Check if file still exists |
611 | if old_mtime != 0 and fmtime == 0: | 615 | if old_mtime != 0 and fmtime == 0: |
612 | logger.debug(2, "Cache: %s's dependency %s was removed", | 616 | self.logger.debug(2, "%s's dependency %s was removed", |
613 | fn, f) | 617 | fn, f) |
614 | self.remove(fn) | 618 | self.remove(fn) |
615 | return False | 619 | return False |
616 | 620 | ||
617 | if (fmtime != old_mtime): | 621 | if (fmtime != old_mtime): |
618 | logger.debug(2, "Cache: %s's dependency %s changed", | 622 | self.logger.debug(2, "%s's dependency %s changed", |
619 | fn, f) | 623 | fn, f) |
620 | self.remove(fn) | 624 | self.remove(fn) |
621 | return False | 625 | return False |
622 | 626 | ||
@@ -632,14 +636,14 @@ class Cache(NoCache): | |||
632 | continue | 636 | continue |
633 | f, exist = f.split(":") | 637 | f, exist = f.split(":") |
634 | if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): | 638 | if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): |
635 | logger.debug(2, "Cache: %s's file checksum list file %s changed", | 639 | self.logger.debug(2, "%s's file checksum list file %s changed", |
636 | fn, f) | 640 | fn, f) |
637 | self.remove(fn) | 641 | self.remove(fn) |
638 | return False | 642 | return False |
639 | 643 | ||
640 | if tuple(appends) != tuple(info_array[0].appends): | 644 | if tuple(appends) != tuple(info_array[0].appends): |
641 | logger.debug(2, "Cache: appends for %s changed", fn) | 645 | self.logger.debug(2, "appends for %s changed", fn) |
642 | logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends))) | 646 | self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends))) |
643 | self.remove(fn) | 647 | self.remove(fn) |
644 | return False | 648 | return False |
645 | 649 | ||
@@ -648,10 +652,10 @@ class Cache(NoCache): | |||
648 | virtualfn = variant2virtual(fn, cls) | 652 | virtualfn = variant2virtual(fn, cls) |
649 | self.clean.add(virtualfn) | 653 | self.clean.add(virtualfn) |
650 | if virtualfn not in self.depends_cache: | 654 | if virtualfn not in self.depends_cache: |
651 | logger.debug(2, "Cache: %s is not cached", virtualfn) | 655 | self.logger.debug(2, "%s is not cached", virtualfn) |
652 | invalid = True | 656 | invalid = True |
653 | elif len(self.depends_cache[virtualfn]) != len(self.caches_array): | 657 | elif len(self.depends_cache[virtualfn]) != len(self.caches_array): |
654 | logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn) | 658 | self.logger.debug(2, "Extra caches missing for %s?" % virtualfn) |
655 | invalid = True | 659 | invalid = True |
656 | 660 | ||
657 | # If any one of the variants is not present, mark as invalid for all | 661 | # If any one of the variants is not present, mark as invalid for all |
@@ -659,10 +663,10 @@ class Cache(NoCache): | |||
659 | for cls in info_array[0].variants: | 663 | for cls in info_array[0].variants: |
660 | virtualfn = variant2virtual(fn, cls) | 664 | virtualfn = variant2virtual(fn, cls) |
661 | if virtualfn in self.clean: | 665 | if virtualfn in self.clean: |
662 | logger.debug(2, "Cache: Removing %s from cache", virtualfn) | 666 | self.logger.debug(2, "Removing %s from cache", virtualfn) |
663 | self.clean.remove(virtualfn) | 667 | self.clean.remove(virtualfn) |
664 | if fn in self.clean: | 668 | if fn in self.clean: |
665 | logger.debug(2, "Cache: Marking %s as not clean", fn) | 669 | self.logger.debug(2, "Marking %s as not clean", fn) |
666 | self.clean.remove(fn) | 670 | self.clean.remove(fn) |
667 | return False | 671 | return False |
668 | 672 | ||
@@ -675,10 +679,10 @@ class Cache(NoCache): | |||
675 | Called from the parser in error cases | 679 | Called from the parser in error cases |
676 | """ | 680 | """ |
677 | if fn in self.depends_cache: | 681 | if fn in self.depends_cache: |
678 | logger.debug(1, "Removing %s from cache", fn) | 682 | self.logger.debug(1, "Removing %s from cache", fn) |
679 | del self.depends_cache[fn] | 683 | del self.depends_cache[fn] |
680 | if fn in self.clean: | 684 | if fn in self.clean: |
681 | logger.debug(1, "Marking %s as unclean", fn) | 685 | self.logger.debug(1, "Marking %s as unclean", fn) |
682 | self.clean.remove(fn) | 686 | self.clean.remove(fn) |
683 | 687 | ||
684 | def sync(self): | 688 | def sync(self): |
@@ -691,12 +695,13 @@ class Cache(NoCache): | |||
691 | return | 695 | return |
692 | 696 | ||
693 | if self.cacheclean: | 697 | if self.cacheclean: |
694 | logger.debug(2, "Cache is clean, not saving.") | 698 | self.logger.debug(2, "Cache is clean, not saving.") |
695 | return | 699 | return |
696 | 700 | ||
697 | for cache_class in self.caches_array: | 701 | for cache_class in self.caches_array: |
698 | cache_class_name = cache_class.__name__ | 702 | cache_class_name = cache_class.__name__ |
699 | cachefile = self.getCacheFile(cache_class.cachefile) | 703 | cachefile = self.getCacheFile(cache_class.cachefile) |
704 | self.logger.debug(2, "Writing %s", cachefile) | ||
700 | with open(cachefile, "wb") as f: | 705 | with open(cachefile, "wb") as f: |
701 | p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL) | 706 | p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL) |
702 | p.dump(__cache_version__) | 707 | p.dump(__cache_version__) |