34 files changed, 237 insertions, 228 deletions
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 40da7a0c2b..6c37967513 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -169,7 +169,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
             fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
             for p in fakedirs:
                 bb.utils.mkdirhier(p)
-            logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
+            logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
                             (fn, taskname, ', '.join(fakedirs)))
         else:
             envvars = (workerdata["fakerootnoenv"][fn] or "").split()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index b217737347..99e57a02e4 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,8 +21,8 @@ class BBHandledException(Exception):
     The big dilemma for generic bitbake code is what information to give the user
     when an exception occurs. Any exception inheriting this base exception class
     has already provided information to the user via some 'fired' message type such as
-    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
-    encounters an exception derived from this class, no backtrace or other information
+    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
+    encounters an exception derived from this class, no backtrace or other information
     will be given to the user, its assumed the earlier event provided the relevant information.
     """
     pass
@@ -42,7 +42,16 @@ class BBLoggerMixin(object):
 
     def setup_bblogger(self, name):
         if name.split(".")[0] == "BitBake":
-            self.debug = self.bbdebug
+            self.debug = self._debug_helper
+
+    def _debug_helper(self, *args, **kwargs):
+        return self.bbdebug(1, *args, **kwargs)
+
+    def debug2(self, *args, **kwargs):
+        return self.bbdebug(2, *args, **kwargs)
+
+    def debug3(self, *args, **kwargs):
+        return self.bbdebug(3, *args, **kwargs)
 
     def bbdebug(self, level, msg, *args, **kwargs):
         loglevel = logging.DEBUG - level + 1
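The helpers above encode the whole scheme: bbdebug() maps BitBake debug level N to the standard logging level logging.DEBUG - N + 1, so debug() logs at 10, debug2() at 9 and debug3() at 8, and debug() becomes signature-compatible with logging.Logger.debug. A minimal runnable sketch of the same idea (simplified; the real BBLoggerMixin also handles logger setup and custom records):

    import logging

    class DemoLoggerMixin:
        # Simplified re-creation of the helpers above, for illustration only.
        def bbdebug(self, level, msg, *args, **kwargs):
            # BitBake debug level N -> logging level DEBUG - N + 1 (10, 9, 8).
            loglevel = logging.DEBUG - level + 1
            if self.isEnabledFor(loglevel):
                self._log(loglevel, msg, args, **kwargs)

        def debug(self, *args, **kwargs):   # stands in for _debug_helper
            return self.bbdebug(1, *args, **kwargs)

        def debug2(self, *args, **kwargs):
            return self.bbdebug(2, *args, **kwargs)

        def debug3(self, *args, **kwargs):
            return self.bbdebug(3, *args, **kwargs)

    class DemoLogger(DemoLoggerMixin, logging.Logger):
        pass

    logging.setLoggerClass(DemoLogger)
    logging.basicConfig(level=1)        # let all custom levels through
    logger = logging.getLogger("BitBake.Demo")
    logger.setLevel(logging.DEBUG - 2)  # enable debug, debug2 and debug3
    logger.debug("level 1 message")
    logger.debug2("level 2 message")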
@@ -128,7 +137,7 @@ def debug(lvl, *args):
        mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
        args = (lvl,) + args
        lvl = 1
-    mainlogger.debug(lvl, ''.join(args))
+    mainlogger.bbdebug(lvl, ''.join(args))
 
 def note(*args):
     mainlogger.info(''.join(args))
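With bb.debug() kept as a backwards-compatible wrapper (it still accepts a numeric level and routes it through bbdebug()), the remaining hunks of this commit are the mechanical call-site conversion. The before/after pattern, continuing the sketch above with placeholder values:

    fn, cachefile, cache_exists = "foo_1.0.bb", "bb_cache.dat", True

    # Before: a leading numeric level, incompatible with logging.Logger.debug():
    #   logger.debug(1, "Parsing %s", fn)
    #   logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)

    # After: level 1 uses the stock debug() signature, levels 2 and 3 move to
    # the new debug2()/debug3() helpers.
    logger.debug("Parsing %s", fn)
    logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)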
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 974d2ff065..f4f897e41a 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -583,7 +583,7 @@ def _exec_task(fn, task, d, quieterr):
         logger.error("No such task: %s" % task)
         return 1
 
-    logger.debug(1, "Executing task %s", task)
+    logger.debug("Executing task %s", task)
 
     localdata = _task_data(fn, task, d)
     tempdir = localdata.getVar('T')
@@ -596,7 +596,7 @@ def _exec_task(fn, task, d, quieterr):
             curnice = os.nice(0)
             nice = int(nice) - curnice
             newnice = os.nice(nice)
-            logger.debug(1, "Renice to %s " % newnice)
+            logger.debug("Renice to %s " % newnice)
         ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
         if ionice:
             try:
@@ -720,7 +720,7 @@ def _exec_task(fn, task, d, quieterr):
 
     logfile.close()
     if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-        logger.debug(2, "Zero size logfn %s, removing", logfn)
+        logger.debug2("Zero size logfn %s, removing", logfn)
         bb.utils.remove(logfn)
         bb.utils.remove(loglink)
     event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index c85effd6f0..aea2b8bc11 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -215,7 +215,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         if not self.not_world:
             cachedata.possible_world.append(fn)
         #else:
-        #    logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)
+        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)
 
         # create a collection of all targets for sanity checking
         # tasks, such as upstream versions, license, and tools for
@@ -323,7 +323,7 @@ class NoCache(object):
         Return a complete set of data for fn.
         To do this, we need to parse the file.
         """
-        logger.debug(1, "Parsing %s (full)" % virtualfn)
+        logger.debug("Parsing %s (full)" % virtualfn)
         (fn, virtual, mc) = virtualfn2realfn(virtualfn)
         bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
         return bb_data[virtual]
@@ -400,7 +400,7 @@ class Cache(NoCache):
 
         self.cachefile = self.getCacheFile("bb_cache.dat")
 
-        self.logger.debug(1, "Cache dir: %s", self.cachedir)
+        self.logger.debug("Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
 
         cache_ok = True
@@ -408,7 +408,7 @@ class Cache(NoCache):
             for cache_class in self.caches_array:
                 cachefile = self.getCacheFile(cache_class.cachefile)
                 cache_exists = os.path.exists(cachefile)
-                self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+                self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
                 cache_ok = cache_ok and cache_exists
                 cache_class.init_cacheData(self)
         if cache_ok:
@@ -416,7 +416,7 @@ class Cache(NoCache):
         elif os.path.isfile(self.cachefile):
             self.logger.info("Out of date cache found, rebuilding...")
         else:
-            self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+            self.logger.debug("Cache file %s not found, building..." % self.cachefile)
 
         # We don't use the symlink, its just for debugging convinience
         if self.mc:
@@ -453,7 +453,7 @@ class Cache(NoCache):
 
         for cache_class in self.caches_array:
             cachefile = self.getCacheFile(cache_class.cachefile)
-            self.logger.debug(1, 'Loading cache file: %s' % cachefile)
+            self.logger.debug('Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
                 # Check cache version information
@@ -500,7 +500,7 @@ class Cache(NoCache):
 
     def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
-        self.logger.debug(1, "Parsing %s", filename)
+        self.logger.debug("Parsing %s", filename)
         infos = []
         datastores = self.load_bbfile(filename, appends, mc=self.mc)
         depends = []
@@ -554,7 +554,7 @@ class Cache(NoCache):
         cached, infos = self.load(fn, appends)
         for virtualfn, info_array in infos:
             if info_array[0].skipped:
-                self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
                 skipped += 1
             else:
                 self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -590,21 +590,21 @@ class Cache(NoCache):
 
         # File isn't in depends_cache
         if not fn in self.depends_cache:
-            self.logger.debug(2, "%s is not cached", fn)
+            self.logger.debug2("%s is not cached", fn)
             return False
 
         mtime = bb.parse.cached_mtime_noerror(fn)
 
         # Check file still exists
         if mtime == 0:
-            self.logger.debug(2, "%s no longer exists", fn)
+            self.logger.debug2("%s no longer exists", fn)
             self.remove(fn)
             return False
 
         info_array = self.depends_cache[fn]
         # Check the file's timestamp
         if mtime != info_array[0].timestamp:
-            self.logger.debug(2, "%s changed", fn)
+            self.logger.debug2("%s changed", fn)
             self.remove(fn)
             return False
 
@@ -615,13 +615,13 @@ class Cache(NoCache):
             fmtime = bb.parse.cached_mtime_noerror(f)
             # Check if file still exists
             if old_mtime != 0 and fmtime == 0:
-                self.logger.debug(2, "%s's dependency %s was removed",
+                self.logger.debug2("%s's dependency %s was removed",
                                     fn, f)
                 self.remove(fn)
                 return False
 
             if (fmtime != old_mtime):
-                self.logger.debug(2, "%s's dependency %s changed",
+                self.logger.debug2("%s's dependency %s changed",
                                     fn, f)
                 self.remove(fn)
                 return False
@@ -638,14 +638,14 @@ class Cache(NoCache):
                 continue
             f, exist = f.split(":")
             if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
-                self.logger.debug(2, "%s's file checksum list file %s changed",
+                self.logger.debug2("%s's file checksum list file %s changed",
                                     fn, f)
                 self.remove(fn)
                 return False
 
         if tuple(appends) != tuple(info_array[0].appends):
-            self.logger.debug(2, "appends for %s changed", fn)
-            self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+            self.logger.debug2("appends for %s changed", fn)
+            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
             self.remove(fn)
             return False
 
@@ -654,10 +654,10 @@ class Cache(NoCache):
             virtualfn = variant2virtual(fn, cls)
             self.clean.add(virtualfn)
             if virtualfn not in self.depends_cache:
-                self.logger.debug(2, "%s is not cached", virtualfn)
+                self.logger.debug2("%s is not cached", virtualfn)
                 invalid = True
             elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
-                self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
+                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                 invalid = True
 
         # If any one of the variants is not present, mark as invalid for all
@@ -665,10 +665,10 @@ class Cache(NoCache):
             for cls in info_array[0].variants:
                 virtualfn = variant2virtual(fn, cls)
                 if virtualfn in self.clean:
-                    self.logger.debug(2, "Removing %s from cache", virtualfn)
+                    self.logger.debug2("Removing %s from cache", virtualfn)
                     self.clean.remove(virtualfn)
             if fn in self.clean:
-                self.logger.debug(2, "Marking %s as not clean", fn)
+                self.logger.debug2("Marking %s as not clean", fn)
                 self.clean.remove(fn)
             return False
 
@@ -681,10 +681,10 @@ class Cache(NoCache):
         Called from the parser in error cases
         """
         if fn in self.depends_cache:
-            self.logger.debug(1, "Removing %s from cache", fn)
+            self.logger.debug("Removing %s from cache", fn)
             del self.depends_cache[fn]
         if fn in self.clean:
-            self.logger.debug(1, "Marking %s as unclean", fn)
+            self.logger.debug("Marking %s as unclean", fn)
             self.clean.remove(fn)
 
     def sync(self):
@@ -697,13 +697,13 @@ class Cache(NoCache):
             return
 
         if self.cacheclean:
-            self.logger.debug(2, "Cache is clean, not saving.")
+            self.logger.debug2("Cache is clean, not saving.")
             return
 
         for cache_class in self.caches_array:
             cache_class_name = cache_class.__name__
             cachefile = self.getCacheFile(cache_class.cachefile)
-            self.logger.debug(2, "Writing %s", cachefile)
+            self.logger.debug2("Writing %s", cachefile)
             with open(cachefile, "wb") as f:
                 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                 p.dump(__cache_version__)
@@ -879,7 +879,7 @@ class MultiProcessCache(object):
         bb.utils.mkdirhier(cachedir)
         self.cachefile = os.path.join(cachedir,
                                       cache_file_name or self.__class__.cache_file_name)
-        logger.debug(1, "Using cache in '%s'", self.cachefile)
+        logger.debug("Using cache in '%s'", self.cachefile)
 
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 
@@ -985,7 +985,7 @@ class SimpleCache(object):
         bb.utils.mkdirhier(cachedir)
         self.cachefile = os.path.join(cachedir,
                                       cache_file_name or self.__class__.cache_file_name)
-        logger.debug(1, "Using cache in '%s'", self.cachefile)
+        logger.debug("Using cache in '%s'", self.cachefile)
 
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 67bf925441..0e492b9be9 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -443,7 +443,7 @@ class BBCooker:
                         continue
                 except AttributeError:
                     pass
-                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
+                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                 clean = False
             if hasattr(self.configuration, o):
@@ -470,17 +470,17 @@ class BBCooker:
 
         for k in bb.utils.approved_variables():
             if k in environment and k not in self.configuration.env:
-                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
+                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                 self.configuration.env[k] = environment[k]
                 clean = False
             if k in self.configuration.env and k not in environment:
-                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
+                logger.debug("Updating environment variable %s (deleted)" % (k))
                 del self.configuration.env[k]
                 clean = False
             if k not in self.configuration.env and k not in environment:
                 continue
             if environment[k] != self.configuration.env[k]:
-                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
+                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                 self.configuration.env[k] = environment[k]
                 clean = False
 
@@ -488,7 +488,7 @@ class BBCooker:
             self.configuration.env = environment
 
         if not clean:
-            logger.debug(1, "Base environment change, triggering reparse")
+            logger.debug("Base environment change, triggering reparse")
             self.reset()
 
     def runCommands(self, server, data, abort):
@@ -699,7 +699,7 @@ class BBCooker:
                     if depmc not in self.multiconfigs:
                         bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
                     else:
-                        logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
+                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                         taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                         seen.add(k)
                         new = True
@@ -1555,7 +1555,7 @@ class BBCooker:
             self.inotify_modified_files = []
 
             if not self.baseconfig_valid:
-                logger.debug(1, "Reloading base configuration data")
+                logger.debug("Reloading base configuration data")
                 self.initConfigurationData()
                 self.handlePRServ()
 
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index ee3d7b1672..19169d780f 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_decoded = list(decodeurl(ud.url))
     uri_find_decoded = list(decodeurl(uri_find))
     uri_replace_decoded = list(decodeurl(uri_replace))
-    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     result = encodeurl(result_decoded)
     if result == ud.url:
         return None
-    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    logger.debug2("For url %s returning %s" % (ud.url, result))
     return result
 
 methods = []
@@ -499,9 +499,9 @@ def fetcher_init(d):
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
-        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
-        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
         revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         cmd = 'export PSEUDO_DISABLED=1; ' + cmd
 
     if workdir:
-        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+        logger.debug("Running '%s' in %s" % (cmd, workdir))
     else:
-        logger.debug(1, "Running %s", cmd)
+        logger.debug("Running %s", cmd)
 
     success = False
     error_message = ""
@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
     elif not trusted_network(d, url):
         raise UntrustedUrl(url, info)
     else:
-        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+        logger.debug("Fetcher accessed the network with the command %s" % info)
 
 def build_mirroruris(origud, mirrors, ld):
     uris = []
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
                     continue
 
                 if not trusted_network(ld, newuri):
-                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+                    logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                     continue
 
                 # Create a local copy of the mirrors minus the current line
@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld):
                     newud = FetchData(newuri, ld)
                     newud.setup_localpath(ld)
                 except bb.fetch2.BBFetchException as e:
-                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
-                    logger.debug(1, str(e))
+                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                    logger.debug(str(e))
                     try:
                         # setup_localpath of file:// urls may fail, we should still see
                         # if mirrors of the url exist
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         elif isinstance(e, NoChecksumError):
             raise
         else:
-            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
-            logger.debug(1, str(e))
+            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(str(e))
         try:
             ud.method.clean(ud, ld)
         except UnboundLocalError:
@@ -1688,7 +1688,7 @@ class Fetch(object):
                 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     done = True
                 elif m.try_premirror(ud, self.d):
-                    logger.debug(1, "Trying PREMIRRORS")
+                    logger.debug("Trying PREMIRRORS")
                     mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     done = m.try_mirrors(self, ud, self.d, mirrors)
                     if done:
@@ -1698,7 +1698,7 @@ class Fetch(object):
                             m.update_donestamp(ud, self.d)
                         except ChecksumError as e:
                             logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                             done = False
 
             if premirroronly:
@@ -1710,7 +1710,7 @@ class Fetch(object):
                 try:
                     if not trusted_network(self.d, ud.url):
                         raise UntrustedUrl(ud.url)
-                    logger.debug(1, "Trying Upstream")
+                    logger.debug("Trying Upstream")
                     m.download(ud, self.d)
                     if hasattr(m, "build_mirror_data"):
                         m.build_mirror_data(ud, self.d)
@@ -1725,19 +1725,19 @@ class Fetch(object):
             except BBFetchException as e:
                 if isinstance(e, ChecksumError):
                     logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
-                    logger.debug(1, str(e))
+                    logger.debug(str(e))
                     if os.path.exists(ud.localpath):
                         rename_bad_checksum(ud, e.checksum)
                 elif isinstance(e, NoChecksumError):
                     raise
                 else:
                     logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
-                    logger.debug(1, str(e))
+                    logger.debug(str(e))
                 firsterr = e
                 # Remove any incomplete fetch
                 if not verified_stamp:
                     m.clean(ud, self.d)
-                logger.debug(1, "Trying MIRRORS")
+                logger.debug("Trying MIRRORS")
                 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                 done = m.try_mirrors(self, ud, self.d, mirrors)
 
@@ -1774,7 +1774,7 @@ class Fetch(object):
             ud = self.ud[u]
             ud.setup_localpath(self.d)
             m = ud.method
-            logger.debug(1, "Testing URL %s", u)
+            logger.debug("Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
             mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = m.try_mirrors(self, ud, self.d, mirrors, True)
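The fetch2/__init__.py hunks also trace the fetcher's source ordering, which the debug messages make visible: PREMIRRORS first, then the upstream URL (only on a trusted network), then MIRRORS. A deliberately simplified sketch of that fallback (illustrative names, error handling omitted):

    def fetch_with_fallback(url, premirrors, mirrors, fetch_one, trusted, logger):
        # Try each source in the order the messages above are logged.
        logger.debug("Trying PREMIRRORS")
        for mirror in premirrors:
            if fetch_one(mirror):
                return True
        if trusted(url):
            logger.debug("Trying Upstream")
            if fetch_one(url):
                return True
        logger.debug("Trying MIRRORS")
        for mirror in mirrors:
            if fetch_one(mirror):
                return True
        return False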
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
index 566ace9f05..fc558f50b0 100644
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -74,16 +74,16 @@ class Bzr(FetchMethod):
 
         if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
             bzrcmd = self._buildbzrcommand(ud, d, "update")
-            logger.debug(1, "BZR Update %s", ud.url)
+            logger.debug("BZR Update %s", ud.url)
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
             runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
         else:
             bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
             bzrcmd = self._buildbzrcommand(ud, d, "fetch")
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
-            logger.debug(1, "BZR Checkout %s", ud.url)
+            logger.debug("BZR Checkout %s", ud.url)
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", bzrcmd)
+            logger.debug("Running %s", bzrcmd)
             runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
 
         scmdata = ud.parm.get("scmdata", "")
@@ -109,7 +109,7 @@ class Bzr(FetchMethod):
         """
         Return the latest upstream revision number
         """
-        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+        logger.debug2("BZR fetcher hitting network for %s", ud.url)
 
         bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
 
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index 49d7ae1b09..1a9c863769 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod):
         return ud.type in ['ccrc']
 
     def debug(self, msg):
-        logger.debug(1, "ClearCase: %s", msg)
+        logger.debug("ClearCase: %s", msg)
 
     def urldata_init(self, ud, d):
         """
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index 22abdef792..01de5ff4ca 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -109,7 +109,7 @@ class Cvs(FetchMethod):
             cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
 
         # create module directory
-        logger.debug(2, "Fetch: checking for module directory")
+        logger.debug2("Fetch: checking for module directory")
         moddir = os.path.join(ud.pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -123,7 +123,7 @@ class Cvs(FetchMethod):
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
             workdir = ud.pkgdir
-            logger.debug(1, "Running %s", cvscmd)
+            logger.debug("Running %s", cvscmd)
             bb.fetch2.check_network_access(d, cvscmd, ud.url)
             cmd = cvscmd
 
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index d6e5c5c050..a4527bf364 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -78,7 +78,7 @@ class GitSM(Git):
                 module_hash = ""
 
             if not module_hash:
-                logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+                logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
                 continue
 
             submodules.append(m)
@@ -179,7 +179,7 @@ class GitSM(Git):
                 (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
         if len(need_update_list) > 0:
-            logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
+            logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
             return True
 
         return False
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 8f503701ed..063e13008a 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -150,7 +150,7 @@ class Hg(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         # If the checkout doesn't exist and the mirror tarball does, extract it
         if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
@@ -160,7 +160,7 @@ class Hg(FetchMethod):
         if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
             # Found the source, check whether need pull
             updatecmd = self._buildhgcommand(ud, d, "update")
-            logger.debug(1, "Running %s", updatecmd)
+            logger.debug("Running %s", updatecmd)
             try:
                 runfetchcmd(updatecmd, d, workdir=ud.moddir)
             except bb.fetch2.FetchError:
@@ -168,7 +168,7 @@ class Hg(FetchMethod):
                 pullcmd = self._buildhgcommand(ud, d, "pull")
                 logger.info("Pulling " + ud.url)
                 # update sources there
-                logger.debug(1, "Running %s", pullcmd)
+                logger.debug("Running %s", pullcmd)
                 bb.fetch2.check_network_access(d, pullcmd, ud.url)
                 runfetchcmd(pullcmd, d, workdir=ud.moddir)
             try:
@@ -183,14 +183,14 @@ class Hg(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", fetchcmd)
+            logger.debug("Running %s", fetchcmd)
             bb.fetch2.check_network_access(d, fetchcmd, ud.url)
             runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
 
         # Even when we clone (fetch), we still need to update as hg's clone
         # won't checkout the specified revision if its on a branch
         updatecmd = self._buildhgcommand(ud, d, "update")
-        logger.debug(1, "Running %s", updatecmd)
+        logger.debug("Running %s", updatecmd)
         runfetchcmd(updatecmd, d, workdir=ud.moddir)
 
     def clean(self, ud, d):
@@ -247,9 +247,9 @@ class Hg(FetchMethod):
         if scmdata != "nokeep":
             proto = ud.parm.get('protocol', 'http')
             if not os.access(os.path.join(codir, '.hg'), os.R_OK):
-                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+                logger.debug2("Unpack: creating new hg repository in '" + codir + "'")
                 runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
-            logger.debug(2, "Unpack: updating source in '" + codir + "'")
+            logger.debug2("Unpack: updating source in '" + codir + "'")
             if ud.user and ud.pswd:
                 runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
             else:
@@ -259,5 +259,5 @@ class Hg(FetchMethod):
             else:
                 runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
         else:
-            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+            logger.debug2("Unpack: extracting source to '" + codir + "'")
             runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 25d4557db6..e7d1c8c58f 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -54,12 +54,12 @@ class Local(FetchMethod):
             return [path]
         filespath = d.getVar('FILESPATH')
         if filespath:
-            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            logger.debug2("Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
-            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index 3a6cd29510..d9ce44390c 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
         Fetch url
         """
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
-            logger.debug(1, "Running %s", oscupdatecmd)
+            logger.debug("Running %s", oscupdatecmd)
             bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
             runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
         else:
@@ -98,7 +98,7 @@ class Osc(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", oscfetchcmd)
+            logger.debug("Running %s", oscfetchcmd)
             bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
             runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
 
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index da6d337461..e2a41a4a12 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
         p4port = d.getVar('P4PORT')
 
         if p4port:
-            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+            logger.debug('Using recipe provided P4PORT: %s' % p4port)
             ud.host = p4port
         else:
-            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+            logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
             ud.usingp4config = True
             p4cmd = '%s info | grep "Server address"' % ud.basecmd
             bb.fetch2.check_network_access(d, p4cmd, ud.url)
             ud.host = runfetchcmd(p4cmd, d, True)
             ud.host = ud.host.split(': ')[1].strip()
-            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+            logger.debug('Determined P4PORT to be: %s' % ud.host)
             if not ud.host:
                 raise FetchError('Could not determine P4PORT from P4CONFIG')
 
@@ -208,7 +208,7 @@ class Perforce(FetchMethod):
         for filename in p4fileslist:
             item = filename.split(' - ')
             lastaction = item[1].split()
-            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+            logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
             if lastaction[0] == 'delete':
                 continue
             filelist.append(item[0])
@@ -255,7 +255,7 @@ class Perforce(FetchMethod):
             raise FetchError('Could not determine the latest perforce changelist')
 
         tipcset = tip.split(' ')[1]
-        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+        logger.debug('p4 tip found to be changelist %s' % tipcset)
         return tipcset
 
     def sortable_revision(self, ud, d, name):
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
index 2bdbbd4097..fa4cb8149b 100644
--- a/bitbake/lib/bb/fetch2/repo.py
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
         """Fetch url"""
 
         if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
-            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return
 
         repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 971a5add4a..8856ef1c62 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         lf = bb.utils.lockfile(ud.svnlock)
 
@@ -129,7 +129,7 @@ class Svn(FetchMethod):
                     runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                 except FetchError:
                     pass
                logger.debug("Running %s", svncmd)
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.moddir)
             else:
@@ -137,7 +137,7 @@ class Svn(FetchMethod):
                 logger.info("Fetch " + ud.url)
                 # check out sources there
                 bb.utils.mkdirhier(ud.pkgdir)
-                logger.debug(1, "Running %s", svncmd)
+                logger.debug("Running %s", svncmd)
                 bb.fetch2.check_network_access(d, svncmd, ud.url)
                 runfetchcmd(svncmd, d, workdir=ud.pkgdir)
 
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index e952f411c7..78a49676fe 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -88,7 +88,7 @@ class Wget(FetchMethod):
 
         progresshandler = WgetProgressHandler(d)
 
-        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command, ud.url)
         runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
 
@@ -326,11 +326,11 @@ class Wget(FetchMethod):
                 pass
         except urllib.error.URLError as e:
             if try_again:
-                logger.debug(2, "checkstatus: trying again")
+                logger.debug2("checkstatus: trying again")
                 return self.checkstatus(fetch, ud, d, False)
             else:
                 # debug for now to avoid spamming the logs in e.g. remote sstate searches
-                logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+                logger.debug2("checkstatus() urlopen failed: %s" % e)
                 return False
         return True
 
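The checkstatus() hunk shows a retry-once shape: on URLError the first call recurses with try_again=False, then gives up quietly at debug level. As a standalone pattern (illustrative, not BitBake's code):

    import urllib.error
    import urllib.request

    def check_status(url, try_again=True):
        try:
            urllib.request.urlopen(url, timeout=30)
            return True
        except urllib.error.URLError as e:
            if try_again:
                # Retry exactly once by recursing with try_again=False.
                return check_status(url, try_again=False)
            # Kept at debug level upstream to avoid spamming remote searches.
            print("checkstatus() urlopen failed: %s" % e)
            return False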
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index 76e180b411..c01807ba87 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -71,7 +71,7 @@ def update_mtime(f):
 
 def update_cache(f):
     if f in __mtime_cache:
-        logger.debug(1, "Updating mtime cache for %s" % f)
+        logger.debug("Updating mtime cache for %s" % f)
         update_mtime(f)
 
 def clear_cache():
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 0714296af2..0596993d01 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -34,7 +34,7 @@ class IncludeNode(AstNode):
         Include the file and evaluate the statements
         """
         s = data.expand(self.what_file)
-        logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
+        logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
 
         # TODO: Cache those includes... maybe not here though
         if self.force:
@@ -376,7 +376,7 @@ def _create_variants(datastores, names, function, onlyfinalise):
 def multi_finalize(fn, d):
     appends = (d.getVar("__BBAPPEND") or "").split()
     for append in appends:
-        logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
+        logger.debug("Appending .bbappend file %s to %s", append, fn)
         bb.parse.BBHandler.handle(append, d, True)
 
     onlyfinalise = d.getVar("__ONLYFINALISE", False)
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py index 7fc162995f..f8988b8631 100644 --- a/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
| @@ -60,7 +60,7 @@ def inherit(files, fn, lineno, d): | |||
| 60 | file = abs_fn | 60 | file = abs_fn |
| 61 | 61 | ||
| 62 | if not file in __inherit_cache: | 62 | if not file in __inherit_cache: |
| 63 | logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno)) | 63 | logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) |
| 64 | __inherit_cache.append( file ) | 64 | __inherit_cache.append( file ) |
| 65 | d.setVar('__inherit_cache', __inherit_cache) | 65 | d.setVar('__inherit_cache', __inherit_cache) |
| 66 | include(fn, file, lineno, d, "inherit") | 66 | include(fn, file, lineno, d, "inherit") |
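The guarded append above means a class file is inherited at most once per parse; later inherits of the same file fall through as no-ops. A stripped-down sketch of that memoisation (names hypothetical; the real code also records the cache back into the datastore):

    _inherit_cache = []

    def inherit_once(file, include):
        if file not in _inherit_cache:
            _inherit_cache.append(file)  # record before including, as above
            include(file)                # repeated calls now do nothing
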
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py index af64d3446e..f171c5c932 100644 --- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
| @@ -95,7 +95,7 @@ def include_single_file(parentfn, fn, lineno, data, error_out): | |||
| 95 | if exc.errno == errno.ENOENT: | 95 | if exc.errno == errno.ENOENT: |
| 96 | if error_out: | 96 | if error_out: |
| 97 | raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno) | 97 | raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno) |
| 98 | logger.debug(2, "CONF file '%s' not found", fn) | 98 | logger.debug2("CONF file '%s' not found", fn) |
| 99 | else: | 99 | else: |
| 100 | if error_out: | 100 | if error_out: |
| 101 | raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno) | 101 | raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno) |
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py index 5f4fbe3508..c6a209fb3f 100644 --- a/bitbake/lib/bb/persist_data.py +++ b/bitbake/lib/bb/persist_data.py | |||
| @@ -248,7 +248,7 @@ class PersistData(object): | |||
| 248 | stacklevel=2) | 248 | stacklevel=2) |
| 249 | 249 | ||
| 250 | self.data = persist(d) | 250 | self.data = persist(d) |
| 251 | logger.debug(1, "Using '%s' as the persistent data cache", | 251 | logger.debug("Using '%s' as the persistent data cache", |
| 252 | self.data.filename) | 252 | self.data.filename) |
| 253 | 253 | ||
| 254 | def addDomain(self, domain): | 254 | def addDomain(self, domain): |
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py index 3f66a3d99f..0c87dfd4bc 100644 --- a/bitbake/lib/bb/providers.py +++ b/bitbake/lib/bb/providers.py | |||
| @@ -165,7 +165,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): | |||
| 165 | available_vers.sort() | 165 | available_vers.sort() |
| 166 | logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) | 166 | logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) |
| 167 | else: | 167 | else: |
| 168 | logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) | 168 | logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) |
| 169 | 169 | ||
| 170 | return (preferred_ver, preferred_file) | 170 | return (preferred_ver, preferred_file) |
| 171 | 171 | ||
| @@ -232,7 +232,7 @@ def _filterProviders(providers, item, cfgData, dataCache): | |||
| 232 | pkg_pn[pn] = [] | 232 | pkg_pn[pn] = [] |
| 233 | pkg_pn[pn].append(p) | 233 | pkg_pn[pn].append(p) |
| 234 | 234 | ||
| 235 | logger.debug(1, "providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) | 235 | logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) |
| 236 | 236 | ||
| 237 | # First add PREFERRED_VERSIONS | 237 | # First add PREFERRED_VERSIONS |
| 238 | for pn in sorted(pkg_pn): | 238 | for pn in sorted(pkg_pn): |
| @@ -291,7 +291,7 @@ def filterProviders(providers, item, cfgData, dataCache): | |||
| 291 | foundUnique = True | 291 | foundUnique = True |
| 292 | break | 292 | break |
| 293 | 293 | ||
| 294 | logger.debug(1, "sorted providers for %s are: %s", item, eligible) | 294 | logger.debug("sorted providers for %s are: %s", item, eligible) |
| 295 | 295 | ||
| 296 | return eligible, foundUnique | 296 | return eligible, foundUnique |
| 297 | 297 | ||
| @@ -333,7 +333,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): | |||
| 333 | provides = dataCache.pn_provides[pn] | 333 | provides = dataCache.pn_provides[pn] |
| 334 | for provide in provides: | 334 | for provide in provides: |
| 335 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide) | 335 | prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide) |
| 336 | #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) | 336 | #logger.debug("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) |
| 337 | if prefervar in pns and pns[prefervar] not in preferred: | 337 | if prefervar in pns and pns[prefervar] not in preferred: |
| 338 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) | 338 | var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) |
| 339 | logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var) | 339 | logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var) |
| @@ -349,7 +349,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): | |||
| 349 | if numberPreferred > 1: | 349 | if numberPreferred > 1: |
| 350 | logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item)) | 350 | logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item)) |
| 351 | 351 | ||
| 352 | logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible) | 352 | logger.debug("sorted runtime providers for %s are: %s", item, eligible) |
| 353 | 353 | ||
| 354 | return eligible, numberPreferred | 354 | return eligible, numberPreferred |
| 355 | 355 | ||
| @@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend): | |||
| 384 | regexp_cache[pattern] = regexp | 384 | regexp_cache[pattern] = regexp |
| 385 | if regexp.match(rdepend): | 385 | if regexp.match(rdepend): |
| 386 | rproviders += dataCache.packages_dynamic[pattern] | 386 | rproviders += dataCache.packages_dynamic[pattern] |
| 387 | logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend) | 387 | logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) |
| 388 | 388 | ||
| 389 | return rproviders | 389 | return rproviders |
| 390 | 390 | ||
| @@ -396,22 +396,22 @@ def buildWorldTargetList(dataCache, task=None): | |||
| 396 | if dataCache.world_target: | 396 | if dataCache.world_target: |
| 397 | return | 397 | return |
| 398 | 398 | ||
| 399 | logger.debug(1, "collating packages for \"world\"") | 399 | logger.debug("collating packages for \"world\"") |
| 400 | for f in dataCache.possible_world: | 400 | for f in dataCache.possible_world: |
| 401 | terminal = True | 401 | terminal = True |
| 402 | pn = dataCache.pkg_fn[f] | 402 | pn = dataCache.pkg_fn[f] |
| 403 | if task and task not in dataCache.task_deps[f]['tasks']: | 403 | if task and task not in dataCache.task_deps[f]['tasks']: |
| 404 | logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task) | 404 | logger.debug2("World build skipping %s as task %s doesn't exist", f, task) |
| 405 | terminal = False | 405 | terminal = False |
| 406 | 406 | ||
| 407 | for p in dataCache.pn_provides[pn]: | 407 | for p in dataCache.pn_provides[pn]: |
| 408 | if p.startswith('virtual/'): | 408 | if p.startswith('virtual/'): |
| 409 | logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p) | 409 | logger.debug2("World build skipping %s due to %s provider starting with virtual/", f, p) |
| 410 | terminal = False | 410 | terminal = False |
| 411 | break | 411 | break |
| 412 | for pf in dataCache.providers[p]: | 412 | for pf in dataCache.providers[p]: |
| 413 | if dataCache.pkg_fn[pf] != pn: | 413 | if dataCache.pkg_fn[pf] != pn: |
| 414 | logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p) | 414 | logger.debug2("World build skipping %s due to both us and %s providing %s", f, pf, p) |
| 415 | terminal = False | 415 | terminal = False |
| 416 | break | 416 | break |
| 417 | if terminal: | 417 | if terminal: |
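buildWorldTargetList() above walks every candidate recipe and drops it from "world" when the requested task is missing, when it provides a virtual/ name, or when another recipe provides the same thing. A condensed sketch of that terminal test (hypothetical names, the task check omitted):

    def is_world_candidate(pn, provides, providers, pkg_fn):
        for p in provides:
            if p.startswith('virtual/'):
                return False  # virtual providers never enter world
            for pf in providers.get(p, []):
                if pkg_fn[pf] != pn:
                    return False  # another recipe also provides p
        return True
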
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 7d493eb402..2f521e5a14 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
| @@ -544,8 +544,8 @@ class RunQueueData: | |||
| 544 | for tid in self.runtaskentries: | 544 | for tid in self.runtaskentries: |
| 545 | if task_done[tid] is False or deps_left[tid] != 0: | 545 | if task_done[tid] is False or deps_left[tid] != 0: |
| 546 | problem_tasks.append(tid) | 546 | problem_tasks.append(tid) |
| 547 | logger.debug(2, "Task %s is not buildable", tid) | 547 | logger.debug2("Task %s is not buildable", tid) |
| 548 | logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid]) | 548 | logger.debug2("(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid]) |
| 549 | self.runtaskentries[tid].weight = weight[tid] | 549 | self.runtaskentries[tid].weight = weight[tid] |
| 550 | 550 | ||
| 551 | if problem_tasks: | 551 | if problem_tasks: |
| @@ -643,7 +643,7 @@ class RunQueueData: | |||
| 643 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) | 643 | (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) |
| 644 | #runtid = build_tid(mc, fn, taskname) | 644 | #runtid = build_tid(mc, fn, taskname) |
| 645 | 645 | ||
| 646 | #logger.debug(2, "Processing %s,%s:%s", mc, fn, taskname) | 646 | #logger.debug2("Processing %s,%s:%s", mc, fn, taskname) |
| 647 | 647 | ||
| 648 | depends = set() | 648 | depends = set() |
| 649 | task_deps = self.dataCaches[mc].task_deps[taskfn] | 649 | task_deps = self.dataCaches[mc].task_deps[taskfn] |
| @@ -1199,9 +1199,9 @@ class RunQueueData: | |||
| 1199 | """ | 1199 | """ |
| 1200 | Dump some debug information on the internal data structures | 1200 | Dump some debug information on the internal data structures |
| 1201 | """ | 1201 | """ |
| 1202 | logger.debug(3, "run_tasks:") | 1202 | logger.debug3("run_tasks:") |
| 1203 | for tid in self.runtaskentries: | 1203 | for tid in self.runtaskentries: |
| 1204 | logger.debug(3, " %s: %s Deps %s RevDeps %s", tid, | 1204 | logger.debug3(" %s: %s Deps %s RevDeps %s", tid, |
| 1205 | self.runtaskentries[tid].weight, | 1205 | self.runtaskentries[tid].weight, |
| 1206 | self.runtaskentries[tid].depends, | 1206 | self.runtaskentries[tid].depends, |
| 1207 | self.runtaskentries[tid].revdeps) | 1207 | self.runtaskentries[tid].revdeps) |
| @@ -1238,7 +1238,7 @@ class RunQueue: | |||
| 1238 | self.fakeworker = {} | 1238 | self.fakeworker = {} |
| 1239 | 1239 | ||
| 1240 | def _start_worker(self, mc, fakeroot = False, rqexec = None): | 1240 | def _start_worker(self, mc, fakeroot = False, rqexec = None): |
| 1241 | logger.debug(1, "Starting bitbake-worker") | 1241 | logger.debug("Starting bitbake-worker") |
| 1242 | magic = "decafbad" | 1242 | magic = "decafbad" |
| 1243 | if self.cooker.configuration.profile: | 1243 | if self.cooker.configuration.profile: |
| 1244 | magic = "decafbadbad" | 1244 | magic = "decafbadbad" |
| @@ -1283,7 +1283,7 @@ class RunQueue: | |||
| 1283 | def _teardown_worker(self, worker): | 1283 | def _teardown_worker(self, worker): |
| 1284 | if not worker: | 1284 | if not worker: |
| 1285 | return | 1285 | return |
| 1286 | logger.debug(1, "Teardown for bitbake-worker") | 1286 | logger.debug("Teardown for bitbake-worker") |
| 1287 | try: | 1287 | try: |
| 1288 | worker.process.stdin.write(b"<quit></quit>") | 1288 | worker.process.stdin.write(b"<quit></quit>") |
| 1289 | worker.process.stdin.flush() | 1289 | worker.process.stdin.flush() |
| @@ -1356,12 +1356,12 @@ class RunQueue: | |||
| 1356 | 1356 | ||
| 1357 | # If the stamp is missing, it's not current | 1357 | # If the stamp is missing, it's not current |
| 1358 | if not os.access(stampfile, os.F_OK): | 1358 | if not os.access(stampfile, os.F_OK): |
| 1359 | logger.debug(2, "Stampfile %s not available", stampfile) | 1359 | logger.debug2("Stampfile %s not available", stampfile) |
| 1360 | return False | 1360 | return False |
| 1361 | # If it's a 'nostamp' task, it's not current | 1361 | # If it's a 'nostamp' task, it's not current |
| 1362 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] | 1362 | taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] |
| 1363 | if 'nostamp' in taskdep and taskname in taskdep['nostamp']: | 1363 | if 'nostamp' in taskdep and taskname in taskdep['nostamp']: |
| 1364 | logger.debug(2, "%s.%s is nostamp\n", fn, taskname) | 1364 | logger.debug2("%s.%s is nostamp\n", fn, taskname) |
| 1365 | return False | 1365 | return False |
| 1366 | 1366 | ||
| 1367 | if taskname != "do_setscene" and taskname.endswith("_setscene"): | 1367 | if taskname != "do_setscene" and taskname.endswith("_setscene"): |
| @@ -1385,18 +1385,18 @@ class RunQueue: | |||
| 1385 | continue | 1385 | continue |
| 1386 | if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): | 1386 | if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist): |
| 1387 | if not t2: | 1387 | if not t2: |
| 1388 | logger.debug(2, 'Stampfile %s does not exist', stampfile2) | 1388 | logger.debug2('Stampfile %s does not exist', stampfile2) |
| 1389 | iscurrent = False | 1389 | iscurrent = False |
| 1390 | break | 1390 | break |
| 1391 | if t1 < t2: | 1391 | if t1 < t2: |
| 1392 | logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2) | 1392 | logger.debug2('Stampfile %s < %s', stampfile, stampfile2) |
| 1393 | iscurrent = False | 1393 | iscurrent = False |
| 1394 | break | 1394 | break |
| 1395 | if recurse and iscurrent: | 1395 | if recurse and iscurrent: |
| 1396 | if dep in cache: | 1396 | if dep in cache: |
| 1397 | iscurrent = cache[dep] | 1397 | iscurrent = cache[dep] |
| 1398 | if not iscurrent: | 1398 | if not iscurrent: |
| 1399 | logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2)) | 1399 | logger.debug2('Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2)) |
| 1400 | else: | 1400 | else: |
| 1401 | iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache) | 1401 | iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache) |
| 1402 | cache[dep] = iscurrent | 1402 | cache[dep] = iscurrent |
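check_stamp_task() above treats a task as current only when its stamp file exists, the task is not nostamp, and no dependency's stamp is newer. A hedged sketch of the core comparison (hypothetical names; the real code also recurses through dependencies and caches the results):

    import os

    def stamp_is_current(stamp, dep_stamps):
        if not os.path.exists(stamp):
            return False  # missing stamp: task must rerun
        t1 = os.stat(stamp).st_mtime
        for s in dep_stamps:
            if not os.path.exists(s) or t1 < os.stat(s).st_mtime:
                return False  # dependency stamp missing or newer
        return True
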
| @@ -1761,7 +1761,7 @@ class RunQueueExecute: | |||
| 1761 | for scheduler in schedulers: | 1761 | for scheduler in schedulers: |
| 1762 | if self.scheduler == scheduler.name: | 1762 | if self.scheduler == scheduler.name: |
| 1763 | self.sched = scheduler(self, self.rqdata) | 1763 | self.sched = scheduler(self, self.rqdata) |
| 1764 | logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name) | 1764 | logger.debug("Using runqueue scheduler '%s'", scheduler.name) |
| 1765 | break | 1765 | break |
| 1766 | else: | 1766 | else: |
| 1767 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % | 1767 | bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % |
| @@ -1899,7 +1899,7 @@ class RunQueueExecute: | |||
| 1899 | break | 1899 | break |
| 1900 | if alldeps: | 1900 | if alldeps: |
| 1901 | self.setbuildable(revdep) | 1901 | self.setbuildable(revdep) |
| 1902 | logger.debug(1, "Marking task %s as buildable", revdep) | 1902 | logger.debug("Marking task %s as buildable", revdep) |
| 1903 | 1903 | ||
| 1904 | def task_complete(self, task): | 1904 | def task_complete(self, task): |
| 1905 | self.stats.taskCompleted() | 1905 | self.stats.taskCompleted() |
| @@ -1929,7 +1929,7 @@ class RunQueueExecute: | |||
| 1929 | def summarise_scenequeue_errors(self): | 1929 | def summarise_scenequeue_errors(self): |
| 1930 | err = False | 1930 | err = False |
| 1931 | if not self.sqdone: | 1931 | if not self.sqdone: |
| 1932 | logger.debug(1, 'We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) | 1932 | logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) |
| 1933 | completeevent = sceneQueueComplete(self.sq_stats, self.rq) | 1933 | completeevent = sceneQueueComplete(self.sq_stats, self.rq) |
| 1934 | bb.event.fire(completeevent, self.cfgData) | 1934 | bb.event.fire(completeevent, self.cfgData) |
| 1935 | if self.sq_deferred: | 1935 | if self.sq_deferred: |
| @@ -1986,7 +1986,7 @@ class RunQueueExecute: | |||
| 1986 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values(): | 1986 | if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values(): |
| 1987 | if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): | 1987 | if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): |
| 1988 | if nexttask not in self.rqdata.target_tids: | 1988 | if nexttask not in self.rqdata.target_tids: |
| 1989 | logger.debug(2, "Skipping setscene for task %s" % nexttask) | 1989 | logger.debug2("Skipping setscene for task %s" % nexttask) |
| 1990 | self.sq_task_skip(nexttask) | 1990 | self.sq_task_skip(nexttask) |
| 1991 | self.scenequeue_notneeded.add(nexttask) | 1991 | self.scenequeue_notneeded.add(nexttask) |
| 1992 | if nexttask in self.sq_deferred: | 1992 | if nexttask in self.sq_deferred: |
| @@ -1999,28 +1999,28 @@ class RunQueueExecute: | |||
| 1999 | if nexttask in self.sq_deferred: | 1999 | if nexttask in self.sq_deferred: |
| 2000 | if self.sq_deferred[nexttask] not in self.runq_complete: | 2000 | if self.sq_deferred[nexttask] not in self.runq_complete: |
| 2001 | continue | 2001 | continue |
| 2002 | logger.debug(1, "Task %s no longer deferred" % nexttask) | 2002 | logger.debug("Task %s no longer deferred" % nexttask) |
| 2003 | del self.sq_deferred[nexttask] | 2003 | del self.sq_deferred[nexttask] |
| 2004 | valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) | 2004 | valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) |
| 2005 | if not valid: | 2005 | if not valid: |
| 2006 | logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask) | 2006 | logger.debug("%s didn't become valid, skipping setscene" % nexttask) |
| 2007 | self.sq_task_failoutright(nexttask) | 2007 | self.sq_task_failoutright(nexttask) |
| 2008 | return True | 2008 | return True |
| 2009 | else: | 2009 | else: |
| 2010 | self.sqdata.outrightfail.remove(nexttask) | 2010 | self.sqdata.outrightfail.remove(nexttask) |
| 2011 | if nexttask in self.sqdata.outrightfail: | 2011 | if nexttask in self.sqdata.outrightfail: |
| 2012 | logger.debug(2, 'No package found, so skipping setscene task %s', nexttask) | 2012 | logger.debug2('No package found, so skipping setscene task %s', nexttask) |
| 2013 | self.sq_task_failoutright(nexttask) | 2013 | self.sq_task_failoutright(nexttask) |
| 2014 | return True | 2014 | return True |
| 2015 | if nexttask in self.sqdata.unskippable: | 2015 | if nexttask in self.sqdata.unskippable: |
| 2016 | logger.debug(2, "Setscene task %s is unskippable" % nexttask) | 2016 | logger.debug2("Setscene task %s is unskippable" % nexttask) |
| 2017 | task = nexttask | 2017 | task = nexttask |
| 2018 | break | 2018 | break |
| 2019 | if task is not None: | 2019 | if task is not None: |
| 2020 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) | 2020 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) |
| 2021 | taskname = taskname + "_setscene" | 2021 | taskname = taskname + "_setscene" |
| 2022 | if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache): | 2022 | if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache): |
| 2023 | logger.debug(2, 'Stamp for underlying task %s is current, so skipping setscene variant', task) | 2023 | logger.debug2('Stamp for underlying task %s is current, so skipping setscene variant', task) |
| 2024 | self.sq_task_failoutright(task) | 2024 | self.sq_task_failoutright(task) |
| 2025 | return True | 2025 | return True |
| 2026 | 2026 | ||
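The deferred-task branch above only releases a setscene task once the task it was deferred behind has completed, then re-validates its hash before letting it run. A simplified sketch of that gate (hypothetical names; validate stands in for rq.validate_hashes()):

    def release_deferred(task, sq_deferred, runq_complete, validate):
        if task in sq_deferred:
            if sq_deferred[task] not in runq_complete:
                return False              # still blocked; check again later
            del sq_deferred[task]         # no longer deferred
            if not validate({task}):
                return False              # never became valid: skip setscene
        return True
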
| @@ -2030,12 +2030,12 @@ class RunQueueExecute: | |||
| 2030 | return True | 2030 | return True |
| 2031 | 2031 | ||
| 2032 | if self.rq.check_stamp_task(task, taskname, cache=self.stampcache): | 2032 | if self.rq.check_stamp_task(task, taskname, cache=self.stampcache): |
| 2033 | logger.debug(2, 'Setscene stamp current task %s, so skip it and its dependencies', task) | 2033 | logger.debug2('Setscene stamp current task %s, so skip it and its dependencies', task) |
| 2034 | self.sq_task_skip(task) | 2034 | self.sq_task_skip(task) |
| 2035 | return True | 2035 | return True |
| 2036 | 2036 | ||
| 2037 | if self.cooker.configuration.skipsetscene: | 2037 | if self.cooker.configuration.skipsetscene: |
| 2038 | logger.debug(2, 'No setscene tasks should be executed. Skipping %s', task) | 2038 | logger.debug2('No setscene tasks should be executed. Skipping %s', task) |
| 2039 | self.sq_task_failoutright(task) | 2039 | self.sq_task_failoutright(task) |
| 2040 | return True | 2040 | return True |
| 2041 | 2041 | ||
| @@ -2097,12 +2097,12 @@ class RunQueueExecute: | |||
| 2097 | return True | 2097 | return True |
| 2098 | 2098 | ||
| 2099 | if task in self.tasks_covered: | 2099 | if task in self.tasks_covered: |
| 2100 | logger.debug(2, "Setscene covered task %s", task) | 2100 | logger.debug2("Setscene covered task %s", task) |
| 2101 | self.task_skip(task, "covered") | 2101 | self.task_skip(task, "covered") |
| 2102 | return True | 2102 | return True |
| 2103 | 2103 | ||
| 2104 | if self.rq.check_stamp_task(task, taskname, cache=self.stampcache): | 2104 | if self.rq.check_stamp_task(task, taskname, cache=self.stampcache): |
| 2105 | logger.debug(2, "Stamp current task %s", task) | 2105 | logger.debug2("Stamp current task %s", task) |
| 2106 | 2106 | ||
| 2107 | self.task_skip(task, "existing") | 2107 | self.task_skip(task, "existing") |
| 2108 | self.runq_tasksrun.add(task) | 2108 | self.runq_tasksrun.add(task) |
| @@ -2322,7 +2322,7 @@ class RunQueueExecute: | |||
| 2322 | remapped = True | 2322 | remapped = True |
| 2323 | 2323 | ||
| 2324 | if not remapped: | 2324 | if not remapped: |
| 2325 | #logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni)) | 2325 | #logger.debug("Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni)) |
| 2326 | self.rqdata.runtaskentries[tid].hash = newhash | 2326 | self.rqdata.runtaskentries[tid].hash = newhash |
| 2327 | self.rqdata.runtaskentries[tid].unihash = newuni | 2327 | self.rqdata.runtaskentries[tid].unihash = newuni |
| 2328 | changed.add(tid) | 2328 | changed.add(tid) |
| @@ -2337,7 +2337,7 @@ class RunQueueExecute: | |||
| 2337 | for mc in self.rq.fakeworker: | 2337 | for mc in self.rq.fakeworker: |
| 2338 | self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") | 2338 | self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") |
| 2339 | 2339 | ||
| 2340 | hashequiv_logger.debug(1, pprint.pformat("Tasks changed:\n%s" % (changed))) | 2340 | hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) |
| 2341 | 2341 | ||
| 2342 | for tid in changed: | 2342 | for tid in changed: |
| 2343 | if tid not in self.rqdata.runq_setscene_tids: | 2343 | if tid not in self.rqdata.runq_setscene_tids: |
| @@ -2356,7 +2356,7 @@ class RunQueueExecute: | |||
| 2356 | # Check no tasks this covers are running | 2356 | # Check no tasks this covers are running |
| 2357 | for dep in self.sqdata.sq_covered_tasks[tid]: | 2357 | for dep in self.sqdata.sq_covered_tasks[tid]: |
| 2358 | if dep in self.runq_running and dep not in self.runq_complete: | 2358 | if dep in self.runq_running and dep not in self.runq_complete: |
| 2359 | hashequiv_logger.debug(2, "Task %s is running which blocks setscene for %s from running" % (dep, tid)) | 2359 | hashequiv_logger.debug2("Task %s is running which blocks setscene for %s from running" % (dep, tid)) |
| 2360 | valid = False | 2360 | valid = False |
| 2361 | break | 2361 | break |
| 2362 | if not valid: | 2362 | if not valid: |
| @@ -2430,7 +2430,7 @@ class RunQueueExecute: | |||
| 2430 | 2430 | ||
| 2431 | for dep in sorted(self.sqdata.sq_deps[task]): | 2431 | for dep in sorted(self.sqdata.sq_deps[task]): |
| 2432 | if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: | 2432 | if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: |
| 2433 | logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) | 2433 | logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) |
| 2434 | self.sq_task_failoutright(dep) | 2434 | self.sq_task_failoutright(dep) |
| 2435 | continue | 2435 | continue |
| 2436 | if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): | 2436 | if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): |
| @@ -2460,7 +2460,7 @@ class RunQueueExecute: | |||
| 2460 | completed dependencies as buildable | 2460 | completed dependencies as buildable |
| 2461 | """ | 2461 | """ |
| 2462 | 2462 | ||
| 2463 | logger.debug(1, 'Found task %s which could be accelerated', task) | 2463 | logger.debug('Found task %s which could be accelerated', task) |
| 2464 | self.scenequeue_covered.add(task) | 2464 | self.scenequeue_covered.add(task) |
| 2465 | self.scenequeue_updatecounters(task) | 2465 | self.scenequeue_updatecounters(task) |
| 2466 | 2466 | ||
| @@ -2775,13 +2775,13 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s | |||
| 2775 | continue | 2775 | continue |
| 2776 | 2776 | ||
| 2777 | if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache): | 2777 | if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache): |
| 2778 | logger.debug(2, 'Setscene stamp current for task %s', tid) | 2778 | logger.debug2('Setscene stamp current for task %s', tid) |
| 2779 | sqdata.stamppresent.add(tid) | 2779 | sqdata.stamppresent.add(tid) |
| 2780 | sqrq.sq_task_skip(tid) | 2780 | sqrq.sq_task_skip(tid) |
| 2781 | continue | 2781 | continue |
| 2782 | 2782 | ||
| 2783 | if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): | 2783 | if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): |
| 2784 | logger.debug(2, 'Normal stamp current for task %s', tid) | 2784 | logger.debug2('Normal stamp current for task %s', tid) |
| 2785 | sqdata.stamppresent.add(tid) | 2785 | sqdata.stamppresent.add(tid) |
| 2786 | sqrq.sq_task_skip(tid) | 2786 | sqrq.sq_task_skip(tid) |
| 2787 | continue | 2787 | continue |
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index e0e03318a6..0d88c6ec68 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
| @@ -541,7 +541,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 541 | # is much more interesting, so it is reported at debug level 1 | 541 | # is much more interesting, so it is reported at debug level 1 |
| 542 | hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) | 542 | hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server)) |
| 543 | else: | 543 | else: |
| 544 | hashequiv_logger.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) | 544 | hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) |
| 545 | except hashserv.client.HashConnectionError as e: | 545 | except hashserv.client.HashConnectionError as e: |
| 546 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 546 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
| 547 | 547 | ||
| @@ -615,12 +615,12 @@ class SignatureGeneratorUniHashMixIn(object): | |||
| 615 | new_unihash = data['unihash'] | 615 | new_unihash = data['unihash'] |
| 616 | 616 | ||
| 617 | if new_unihash != unihash: | 617 | if new_unihash != unihash: |
| 618 | hashequiv_logger.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) | 618 | hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) |
| 619 | bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d) | 619 | bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d) |
| 620 | self.set_unihash(tid, new_unihash) | 620 | self.set_unihash(tid, new_unihash) |
| 621 | d.setVar('BB_UNIHASH', new_unihash) | 621 | d.setVar('BB_UNIHASH', new_unihash) |
| 622 | else: | 622 | else: |
| 623 | hashequiv_logger.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) | 623 | hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) |
| 624 | except hashserv.client.HashConnectionError as e: | 624 | except hashserv.client.HashConnectionError as e: |
| 625 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) | 625 | bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) |
| 626 | finally: | 626 | finally: |
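One subtlety in the siggen hunk: (1, 2)[unihash == taskhash] is tuple indexing by a bool, so the debug level is still chosen at runtime and the call has to stay on the level-taking bbdebug() form rather than a fixed-level helper. The idiom itself:

    >>> (1, 2)[False]   # hashes differ: the interesting case, level 1
    1
    >>> (1, 2)[True]    # unihash equals taskhash: routine, level 2
    2
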
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py index ffbaf362e8..47bad6d1fa 100644 --- a/bitbake/lib/bb/taskdata.py +++ b/bitbake/lib/bb/taskdata.py | |||
| @@ -131,7 +131,7 @@ class TaskData: | |||
| 131 | for depend in dataCache.deps[fn]: | 131 | for depend in dataCache.deps[fn]: |
| 132 | dependids.add(depend) | 132 | dependids.add(depend) |
| 133 | self.depids[fn] = list(dependids) | 133 | self.depids[fn] = list(dependids) |
| 134 | logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn) | 134 | logger.debug2("Added dependencies %s for %s", str(dataCache.deps[fn]), fn) |
| 135 | 135 | ||
| 136 | # Work out runtime dependencies | 136 | # Work out runtime dependencies |
| 137 | if not fn in self.rdepids: | 137 | if not fn in self.rdepids: |
| @@ -149,9 +149,9 @@ class TaskData: | |||
| 149 | rreclist.append(rdepend) | 149 | rreclist.append(rdepend) |
| 150 | rdependids.add(rdepend) | 150 | rdependids.add(rdepend) |
| 151 | if rdependlist: | 151 | if rdependlist: |
| 152 | logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn) | 152 | logger.debug2("Added runtime dependencies %s for %s", str(rdependlist), fn) |
| 153 | if rreclist: | 153 | if rreclist: |
| 154 | logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn) | 154 | logger.debug2("Added runtime recommendations %s for %s", str(rreclist), fn) |
| 155 | self.rdepids[fn] = list(rdependids) | 155 | self.rdepids[fn] = list(rdependids) |
| 156 | 156 | ||
| 157 | for dep in self.depids[fn]: | 157 | for dep in self.depids[fn]: |
| @@ -378,7 +378,7 @@ class TaskData: | |||
| 378 | for fn in eligible: | 378 | for fn in eligible: |
| 379 | if fn in self.failed_fns: | 379 | if fn in self.failed_fns: |
| 380 | continue | 380 | continue |
| 381 | logger.debug(2, "adding %s to satisfy %s", fn, item) | 381 | logger.debug2("adding %s to satisfy %s", fn, item) |
| 382 | self.add_build_target(fn, item) | 382 | self.add_build_target(fn, item) |
| 383 | self.add_tasks(fn, dataCache) | 383 | self.add_tasks(fn, dataCache) |
| 384 | 384 | ||
| @@ -431,7 +431,7 @@ class TaskData: | |||
| 431 | for fn in eligible: | 431 | for fn in eligible: |
| 432 | if fn in self.failed_fns: | 432 | if fn in self.failed_fns: |
| 433 | continue | 433 | continue |
| 434 | logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item) | 434 | logger.debug2("adding '%s' to satisfy runtime '%s'", fn, item) |
| 435 | self.add_runtime_target(fn, item) | 435 | self.add_runtime_target(fn, item) |
| 436 | self.add_tasks(fn, dataCache) | 436 | self.add_tasks(fn, dataCache) |
| 437 | 437 | ||
| @@ -446,7 +446,7 @@ class TaskData: | |||
| 446 | return | 446 | return |
| 447 | if not missing_list: | 447 | if not missing_list: |
| 448 | missing_list = [] | 448 | missing_list = [] |
| 449 | logger.debug(1, "File '%s' is unbuildable, removing...", fn) | 449 | logger.debug("File '%s' is unbuildable, removing...", fn) |
| 450 | self.failed_fns.append(fn) | 450 | self.failed_fns.append(fn) |
| 451 | for target in self.build_targets: | 451 | for target in self.build_targets: |
| 452 | if fn in self.build_targets[target]: | 452 | if fn in self.build_targets[target]: |
| @@ -526,7 +526,7 @@ class TaskData: | |||
| 526 | added = added + 1 | 526 | added = added + 1 |
| 527 | except (bb.providers.NoRProvider, bb.providers.MultipleRProvider): | 527 | except (bb.providers.NoRProvider, bb.providers.MultipleRProvider): |
| 528 | self.remove_runtarget(target) | 528 | self.remove_runtarget(target) |
| 529 | logger.debug(1, "Resolved " + str(added) + " extra dependencies") | 529 | logger.debug("Resolved " + str(added) + " extra dependencies") |
| 530 | if added == 0: | 530 | if added == 0: |
| 531 | break | 531 | break |
| 532 | # self.dump_data() | 532 | # self.dump_data() |
| @@ -549,38 +549,38 @@ class TaskData: | |||
| 549 | """ | 549 | """ |
| 550 | Dump some debug information on the internal data structures | 550 | Dump some debug information on the internal data structures |
| 551 | """ | 551 | """ |
| 552 | logger.debug(3, "build_names:") | 552 | logger.debug3("build_names:") |
| 553 | logger.debug(3, ", ".join(self.build_targets)) | 553 | logger.debug3(", ".join(self.build_targets)) |
| 554 | 554 | ||
| 555 | logger.debug(3, "run_names:") | 555 | logger.debug3("run_names:") |
| 556 | logger.debug(3, ", ".join(self.run_targets)) | 556 | logger.debug3(", ".join(self.run_targets)) |
| 557 | 557 | ||
| 558 | logger.debug(3, "build_targets:") | 558 | logger.debug3("build_targets:") |
| 559 | for target in self.build_targets: | 559 | for target in self.build_targets: |
| 560 | targets = "None" | 560 | targets = "None" |
| 561 | if target in self.build_targets: | 561 | if target in self.build_targets: |
| 562 | targets = self.build_targets[target] | 562 | targets = self.build_targets[target] |
| 563 | logger.debug(3, " %s: %s", target, targets) | 563 | logger.debug3(" %s: %s", target, targets) |
| 564 | 564 | ||
| 565 | logger.debug(3, "run_targets:") | 565 | logger.debug3("run_targets:") |
| 566 | for target in self.run_targets: | 566 | for target in self.run_targets: |
| 567 | targets = "None" | 567 | targets = "None" |
| 568 | if target in self.run_targets: | 568 | if target in self.run_targets: |
| 569 | targets = self.run_targets[target] | 569 | targets = self.run_targets[target] |
| 570 | logger.debug(3, " %s: %s", target, targets) | 570 | logger.debug3(" %s: %s", target, targets) |
| 571 | 571 | ||
| 572 | logger.debug(3, "tasks:") | 572 | logger.debug3("tasks:") |
| 573 | for tid in self.taskentries: | 573 | for tid in self.taskentries: |
| 574 | logger.debug(3, " %s: %s %s %s", | 574 | logger.debug3(" %s: %s %s %s", |
| 575 | tid, | 575 | tid, |
| 576 | self.taskentries[tid].idepends, | 576 | self.taskentries[tid].idepends, |
| 577 | self.taskentries[tid].irdepends, | 577 | self.taskentries[tid].irdepends, |
| 578 | self.taskentries[tid].tdepends) | 578 | self.taskentries[tid].tdepends) |
| 579 | 579 | ||
| 580 | logger.debug(3, "dependency ids (per fn):") | 580 | logger.debug3("dependency ids (per fn):") |
| 581 | for fn in self.depids: | 581 | for fn in self.depids: |
| 582 | logger.debug(3, " %s: %s", fn, self.depids[fn]) | 582 | logger.debug3(" %s: %s", fn, self.depids[fn]) |
| 583 | 583 | ||
| 584 | logger.debug(3, "runtime dependency ids (per fn):") | 584 | logger.debug3("runtime dependency ids (per fn):") |
| 585 | for fn in self.rdepids: | 585 | for fn in self.rdepids: |
| 586 | logger.debug(3, " %s: %s", fn, self.rdepids[fn]) | 586 | logger.debug3(" %s: %s", fn, self.rdepids[fn]) |
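dump_data() above emits everything at debug level 3, so the output stays hidden unless verbosity is raised (with bitbake this should be the repeated -D flag, e.g. -DDD, though the exact numeric mapping lives in the logging setup, not in this file). A self-contained stdlib sketch of the same tiering, with an assumed level value:

    import logging

    DEBUG3 = logging.DEBUG - 2             # quieter than plain DEBUG (assumed value)
    logging.addLevelName(DEBUG3, "DEBUG3")
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("demo")
    log.log(DEBUG3, "hidden while the effective level is INFO")
    log.setLevel(DEBUG3)                   # opt in to extra-verbose output
    log.log(DEBUG3, "visible once the logger opts in")
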
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py index 82c62e3324..43aa592842 100644 --- a/bitbake/lib/bb/ui/buildinfohelper.py +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
| @@ -148,14 +148,14 @@ class ORMWrapper(object): | |||
| 148 | buildrequest = None | 148 | buildrequest = None |
| 149 | if brbe is not None: | 149 | if brbe is not None: |
| 150 | # Toaster-triggered build | 150 | # Toaster-triggered build |
| 151 | logger.debug(1, "buildinfohelper: brbe is %s" % brbe) | 151 | logger.debug("buildinfohelper: brbe is %s" % brbe) |
| 152 | br, _ = brbe.split(":") | 152 | br, _ = brbe.split(":") |
| 153 | buildrequest = BuildRequest.objects.get(pk=br) | 153 | buildrequest = BuildRequest.objects.get(pk=br) |
| 154 | prj = buildrequest.project | 154 | prj = buildrequest.project |
| 155 | else: | 155 | else: |
| 156 | # CLI build | 156 | # CLI build |
| 157 | prj = Project.objects.get_or_create_default_project() | 157 | prj = Project.objects.get_or_create_default_project() |
| 158 | logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj) | 158 | logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj) |
| 159 | 159 | ||
| 160 | if buildrequest is not None: | 160 | if buildrequest is not None: |
| 161 | # reuse existing Build object | 161 | # reuse existing Build object |
| @@ -171,7 +171,7 @@ class ORMWrapper(object): | |||
| 171 | completed_on=now, | 171 | completed_on=now, |
| 172 | build_name='') | 172 | build_name='') |
| 173 | 173 | ||
| 174 | logger.debug(1, "buildinfohelper: build is created %s" % build) | 174 | logger.debug("buildinfohelper: build is created %s" % build) |
| 175 | 175 | ||
| 176 | if buildrequest is not None: | 176 | if buildrequest is not None: |
| 177 | buildrequest.build = build | 177 | buildrequest.build = build |
| @@ -906,7 +906,7 @@ class BuildInfoHelper(object): | |||
| 906 | 906 | ||
| 907 | self.project = None | 907 | self.project = None |
| 908 | 908 | ||
| 909 | logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self)) | 909 | logger.debug("buildinfohelper: Build info helper inited %s" % vars(self)) |
| 910 | 910 | ||
| 911 | 911 | ||
| 912 | ################### | 912 | ################### |
| @@ -1620,7 +1620,7 @@ class BuildInfoHelper(object): | |||
| 1620 | # if we have a backlog of events, do our best to save them here | 1620 | # if we have a backlog of events, do our best to save them here |
| 1621 | if len(self.internal_state['backlog']): | 1621 | if len(self.internal_state['backlog']): |
| 1622 | tempevent = self.internal_state['backlog'].pop() | 1622 | tempevent = self.internal_state['backlog'].pop() |
| 1623 | logger.debug(1, "buildinfohelper: Saving stored event %s " | 1623 | logger.debug("buildinfohelper: Saving stored event %s " |
| 1624 | % tempevent) | 1624 | % tempevent) |
| 1625 | self.store_log_event(tempevent,cli_backlog) | 1625 | self.store_log_event(tempevent,cli_backlog) |
| 1626 | else: | 1626 | else: |
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py index 5c775bd8a8..b282d09abf 100644 --- a/bitbake/lib/bb/utils.py +++ b/bitbake/lib/bb/utils.py | |||
| @@ -609,7 +609,7 @@ def filter_environment(good_vars): | |||
| 609 | os.environ["LC_ALL"] = "en_US.UTF-8" | 609 | os.environ["LC_ALL"] = "en_US.UTF-8" |
| 610 | 610 | ||
| 611 | if removed_vars: | 611 | if removed_vars: |
| 612 | logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys())) | 612 | logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys())) |
| 613 | 613 | ||
| 614 | return removed_vars | 614 | return removed_vars |
| 615 | 615 | ||
| @@ -1613,12 +1613,12 @@ def export_proxies(d): | |||
| 1613 | 1613 | ||
| 1614 | def load_plugins(logger, plugins, pluginpath): | 1614 | def load_plugins(logger, plugins, pluginpath): |
| 1615 | def load_plugin(name): | 1615 | def load_plugin(name): |
| 1616 | logger.debug(1, 'Loading plugin %s' % name) | 1616 | logger.debug('Loading plugin %s' % name) |
| 1617 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) | 1617 | spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) |
| 1618 | if spec: | 1618 | if spec: |
| 1619 | return spec.loader.load_module() | 1619 | return spec.loader.load_module() |
| 1620 | 1620 | ||
| 1621 | logger.debug(1, 'Loading plugins from %s...' % pluginpath) | 1621 | logger.debug('Loading plugins from %s...' % pluginpath) |
| 1622 | 1622 | ||
| 1623 | expanded = (glob.glob(os.path.join(pluginpath, '*' + ext)) | 1623 | expanded = (glob.glob(os.path.join(pluginpath, '*' + ext)) |
| 1624 | for ext in python_extensions) | 1624 | for ext in python_extensions) |
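load_plugin() above is a thin wrapper over importlib's path finder: find_spec() locates the module in the plugin directory and the loader imports it. A self-contained sketch (directory and name are placeholders; load_module() is deprecated, but it is what the code above calls, a modern equivalent being importlib.util.module_from_spec() plus exec_module()):

    import importlib.machinery

    def load_plugin(name, pluginpath):
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            return spec.loader.load_module()
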
diff --git a/bitbake/lib/bblayers/layerindex.py b/bitbake/lib/bblayers/layerindex.py index 95b67a6621..b2f27b21ee 100644 --- a/bitbake/lib/bblayers/layerindex.py +++ b/bitbake/lib/bblayers/layerindex.py | |||
| @@ -79,7 +79,7 @@ class LayerIndexPlugin(ActionPlugin): | |||
| 79 | branches = [args.branch] | 79 | branches = [args.branch] |
| 80 | else: | 80 | else: |
| 81 | branches = (self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or 'master').split() | 81 | branches = (self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or 'master').split() |
| 82 | logger.debug(1, 'Trying branches: %s' % branches) | 82 | logger.debug('Trying branches: %s' % branches) |
| 83 | 83 | ||
| 84 | ignore_layers = [] | 84 | ignore_layers = [] |
| 85 | if args.ignore: | 85 | if args.ignore: |
diff --git a/bitbake/lib/layerindexlib/__init__.py b/bitbake/lib/layerindexlib/__init__.py index 45157b6681..9ca127b9df 100644 --- a/bitbake/lib/layerindexlib/__init__.py +++ b/bitbake/lib/layerindexlib/__init__.py | |||
| @@ -94,7 +94,7 @@ class LayerIndex(): | |||
| 94 | if not param: | 94 | if not param: |
| 95 | continue | 95 | continue |
| 96 | item = param.split('=', 1) | 96 | item = param.split('=', 1) |
| 97 | logger.debug(1, item) | 97 | logger.debug(item) |
| 98 | param_dict[item[0]] = item[1] | 98 | param_dict[item[0]] = item[1] |
| 99 | 99 | ||
| 100 | return param_dict | 100 | return param_dict |
| @@ -123,7 +123,7 @@ class LayerIndex(): | |||
| 123 | up = urlparse(url) | 123 | up = urlparse(url) |
| 124 | 124 | ||
| 125 | if username: | 125 | if username: |
| 126 | logger.debug(1, "Configuring authentication for %s..." % url) | 126 | logger.debug("Configuring authentication for %s..." % url) |
| 127 | password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() | 127 | password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() |
| 128 | password_mgr.add_password(None, "%s://%s" % (up.scheme, up.netloc), username, password) | 128 | password_mgr.add_password(None, "%s://%s" % (up.scheme, up.netloc), username, password) |
| 129 | handler = urllib.request.HTTPBasicAuthHandler(password_mgr) | 129 | handler = urllib.request.HTTPBasicAuthHandler(password_mgr) |
| @@ -133,20 +133,20 @@ class LayerIndex(): | |||
| 133 | 133 | ||
| 134 | urllib.request.install_opener(opener) | 134 | urllib.request.install_opener(opener) |
| 135 | 135 | ||
| 136 | logger.debug(1, "Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)])) | 136 | logger.debug("Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)])) |
| 137 | 137 | ||
| 138 | try: | 138 | try: |
| 139 | res = urlopen(Request(url, headers={'User-Agent': 'Mozilla/5.0 (bitbake/lib/layerindex)'}, unverifiable=True)) | 139 | res = urlopen(Request(url, headers={'User-Agent': 'Mozilla/5.0 (bitbake/lib/layerindex)'}, unverifiable=True)) |
| 140 | except urllib.error.HTTPError as e: | 140 | except urllib.error.HTTPError as e: |
| 141 | logger.debug(1, "HTTP Error: %s: %s" % (e.code, e.reason)) | 141 | logger.debug("HTTP Error: %s: %s" % (e.code, e.reason)) |
| 142 | logger.debug(1, " Requested: %s" % (url)) | 142 | logger.debug(" Requested: %s" % (url)) |
| 143 | logger.debug(1, " Actual: %s" % (e.geturl())) | 143 | logger.debug(" Actual: %s" % (e.geturl())) |
| 144 | 144 | ||
| 145 | if e.code == 404: | 145 | if e.code == 404: |
| 146 | logger.debug(1, "Request not found.") | 146 | logger.debug("Request not found.") |
| 147 | raise LayerIndexFetchError(url, e) | 147 | raise LayerIndexFetchError(url, e) |
| 148 | else: | 148 | else: |
| 149 | logger.debug(1, "Headers:\n%s" % (e.headers)) | 149 | logger.debug("Headers:\n%s" % (e.headers)) |
| 150 | raise LayerIndexFetchError(url, e) | 150 | raise LayerIndexFetchError(url, e) |
| 151 | except OSError as e: | 151 | except OSError as e: |
| 152 | error = 0 | 152 | error = 0 |
| @@ -170,7 +170,7 @@ class LayerIndex(): | |||
| 170 | raise LayerIndexFetchError(url, "Unable to fetch OSError exception: %s" % e) | 170 | raise LayerIndexFetchError(url, "Unable to fetch OSError exception: %s" % e) |
| 171 | 171 | ||
| 172 | finally: | 172 | finally: |
| 173 | logger.debug(1, "...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)])) | 173 | logger.debug("...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)])) |
| 174 | 174 | ||
| 175 | return res | 175 | return res |
| 176 | 176 | ||
| @@ -205,14 +205,14 @@ The format of the indexURI: | |||
| 205 | if reload: | 205 | if reload: |
| 206 | self.indexes = [] | 206 | self.indexes = [] |
| 207 | 207 | ||
| 208 | logger.debug(1, 'Loading: %s' % indexURI) | 208 | logger.debug('Loading: %s' % indexURI) |
| 209 | 209 | ||
| 210 | if not self.plugins: | 210 | if not self.plugins: |
| 211 | raise LayerIndexException("No LayerIndex Plugins available") | 211 | raise LayerIndexException("No LayerIndex Plugins available") |
| 212 | 212 | ||
| 213 | for plugin in self.plugins: | 213 | for plugin in self.plugins: |
| 214 | # Check if the plugin was initialized | 214 | # Check if the plugin was initialized |
| 215 | logger.debug(1, 'Trying %s' % plugin.__class__) | 215 | logger.debug('Trying %s' % plugin.__class__) |
| 216 | if not hasattr(plugin, 'type') or not plugin.type: | 216 | if not hasattr(plugin, 'type') or not plugin.type: |
| 217 | continue | 217 | continue |
| 218 | try: | 218 | try: |
| @@ -220,11 +220,11 @@ The format of the indexURI: | |||
| 220 | indexEnt = plugin.load_index(indexURI, load) | 220 | indexEnt = plugin.load_index(indexURI, load) |
| 221 | break | 221 | break |
| 222 | except LayerIndexPluginUrlError as e: | 222 | except LayerIndexPluginUrlError as e: |
| 223 | logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url)) | 223 | logger.debug("%s doesn't support %s" % (plugin.type, e.url)) |
| 224 | except NotImplementedError: | 224 | except NotImplementedError: |
| 225 | pass | 225 | pass |
| 226 | else: | 226 | else: |
| 227 | logger.debug(1, "No plugins support %s" % indexURI) | 227 | logger.debug("No plugins support %s" % indexURI) |
| 228 | raise LayerIndexException("No plugins support %s" % indexURI) | 228 | raise LayerIndexException("No plugins support %s" % indexURI) |
| 229 | 229 | ||
| 230 | # Mark CONFIG data as something we've added... | 230 | # Mark CONFIG data as something we've added... |
| @@ -255,19 +255,19 @@ will write out the individual elements split by layer and related components. | |||
| 255 | 255 | ||
| 256 | for plugin in self.plugins: | 256 | for plugin in self.plugins: |
| 257 | # Check if the plugin was initialized | 257 | # Check if the plugin was initialized |
| 258 | logger.debug(1, 'Trying %s' % plugin.__class__) | 258 | logger.debug('Trying %s' % plugin.__class__) |
| 259 | if not hasattr(plugin, 'type') or not plugin.type: | 259 | if not hasattr(plugin, 'type') or not plugin.type: |
| 260 | continue | 260 | continue |
| 261 | try: | 261 | try: |
| 262 | plugin.store_index(indexURI, index) | 262 | plugin.store_index(indexURI, index) |
| 263 | break | 263 | break |
| 264 | except LayerIndexPluginUrlError as e: | 264 | except LayerIndexPluginUrlError as e: |
| 265 | logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url)) | 265 | logger.debug("%s doesn't support %s" % (plugin.type, e.url)) |
| 266 | except NotImplementedError: | 266 | except NotImplementedError: |
| 267 | logger.debug(1, "Store not implemented in %s" % plugin.type) | 267 | logger.debug("Store not implemented in %s" % plugin.type) |
| 268 | pass | 268 | pass |
| 269 | else: | 269 | else: |
| 270 | logger.debug(1, "No plugins support %s" % indexURI) | 270 | logger.debug("No plugins support %s" % indexURI) |
| 271 | raise LayerIndexException("No plugins support %s" % indexURI) | 271 | raise LayerIndexException("No plugins support %s" % indexURI) |
| 272 | 272 | ||
| 273 | 273 | ||
| @@ -292,7 +292,7 @@ layerBranches set. If not, they are effectively blank.''' | |||
| 292 | the default configuration until the first vcs_url/branch match.''' | 292 | the default configuration until the first vcs_url/branch match.''' |
| 293 | 293 | ||
| 294 | for index in self.indexes: | 294 | for index in self.indexes: |
| 295 | logger.debug(1, ' searching %s' % index.config['DESCRIPTION']) | 295 | logger.debug(' searching %s' % index.config['DESCRIPTION']) |
| 296 | layerBranch = index.find_vcs_url(vcs_url, [branch]) | 296 | layerBranch = index.find_vcs_url(vcs_url, [branch]) |
| 297 | if layerBranch: | 297 | if layerBranch: |
| 298 | return layerBranch | 298 | return layerBranch |
| @@ -304,7 +304,7 @@ layerBranches set. If not, they are effectively blank.''' | |||
| 304 | If a branch has not been specified, we will iterate over the branches in | 304 | If a branch has not been specified, we will iterate over the branches in |
| 305 | the default configuration until the first collection/branch match.''' | 305 | the default configuration until the first collection/branch match.''' |
| 306 | 306 | ||
| 307 | logger.debug(1, 'find_collection: %s (%s) %s' % (collection, version, branch)) | 307 | logger.debug('find_collection: %s (%s) %s' % (collection, version, branch)) |
| 308 | 308 | ||
| 309 | if branch: | 309 | if branch: |
| 310 | branches = [branch] | 310 | branches = [branch] |
| @@ -312,12 +312,12 @@ layerBranches set. If not, they are effectively blank.''' | |||
| 312 | branches = None | 312 | branches = None |
| 313 | 313 | ||
| 314 | for index in self.indexes: | 314 | for index in self.indexes: |
| 315 | logger.debug(1, ' searching %s' % index.config['DESCRIPTION']) | 315 | logger.debug(' searching %s' % index.config['DESCRIPTION']) |
| 316 | layerBranch = index.find_collection(collection, version, branches) | 316 | layerBranch = index.find_collection(collection, version, branches) |
| 317 | if layerBranch: | 317 | if layerBranch: |
| 318 | return layerBranch | 318 | return layerBranch |
| 319 | else: | 319 | else: |
| 320 | logger.debug(1, 'Collection %s (%s) not found for branch (%s)' % (collection, version, branch)) | 320 | logger.debug('Collection %s (%s) not found for branch (%s)' % (collection, version, branch)) |
| 321 | return None | 321 | return None |
| 322 | 322 | ||
| 323 | def find_layerbranch(self, name, branch=None): | 323 | def find_layerbranch(self, name, branch=None): |
| @@ -408,7 +408,7 @@ layerBranches set. If not, they are effectively blank.''' | |||
| 408 | version=deplayerbranch.version | 408 | version=deplayerbranch.version |
| 409 | ) | 409 | ) |
| 410 | if rdeplayerbranch != deplayerbranch: | 410 | if rdeplayerbranch != deplayerbranch: |
| 411 | logger.debug(1, 'Replaced %s:%s:%s with %s:%s:%s' % \ | 411 | logger.debug('Replaced %s:%s:%s with %s:%s:%s' % \ |
| 412 | (deplayerbranch.index.config['DESCRIPTION'], | 412 | (deplayerbranch.index.config['DESCRIPTION'], |
| 413 | deplayerbranch.branch.name, | 413 | deplayerbranch.branch.name, |
| 414 | deplayerbranch.layer.name, | 414 | deplayerbranch.layer.name, |
| @@ -1121,7 +1121,7 @@ class LayerBranch(LayerIndexItemObj): | |||
| 1121 | @property | 1121 | @property |
| 1122 | def branch(self): | 1122 | def branch(self): |
| 1123 | try: | 1123 | try: |
| 1124 | logger.debug(1, "Get branch object from branches[%s]" % (self.branch_id)) | 1124 | logger.debug("Get branch object from branches[%s]" % (self.branch_id)) |
| 1125 | return self.index.branches[self.branch_id] | 1125 | return self.index.branches[self.branch_id] |
| 1126 | except KeyError: | 1126 | except KeyError: |
| 1127 | raise AttributeError('Unable to find branches in index to map branch_id %s' % self.branch_id) | 1127 | raise AttributeError('Unable to find branches in index to map branch_id %s' % self.branch_id) |
| @@ -1149,7 +1149,7 @@ class LayerBranch(LayerIndexItemObj): | |||
| 1149 | 1149 | ||
| 1150 | @actual_branch.setter | 1150 | @actual_branch.setter |
| 1151 | def actual_branch(self, value): | 1151 | def actual_branch(self, value): |
| 1152 | logger.debug(1, "Set actual_branch to %s .. name is %s" % (value, self.branch.name)) | 1152 | logger.debug("Set actual_branch to %s .. name is %s" % (value, self.branch.name)) |
| 1153 | if value != self.branch.name: | 1153 | if value != self.branch.name: |
| 1154 | self._setattr('actual_branch', value, prop=False) | 1154 | self._setattr('actual_branch', value, prop=False) |
| 1155 | else: | 1155 | else: |
diff --git a/bitbake/lib/layerindexlib/cooker.py b/bitbake/lib/layerindexlib/cooker.py index 21ec438a22..2de6e5faa0 100644 --- a/bitbake/lib/layerindexlib/cooker.py +++ b/bitbake/lib/layerindexlib/cooker.py | |||
| @@ -173,7 +173,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 173 | else: | 173 | else: |
| 174 | branches = ['HEAD'] | 174 | branches = ['HEAD'] |
| 175 | 175 | ||
| 176 | logger.debug(1, "Loading cooker data branches %s" % branches) | 176 | logger.debug("Loading cooker data branches %s" % branches) |
| 177 | 177 | ||
| 178 | index = self._load_bblayers(branches=branches) | 178 | index = self._load_bblayers(branches=branches) |
| 179 | 179 | ||
| @@ -220,7 +220,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 220 | required=required, layerbranch=layerBranchId, | 220 | required=required, layerbranch=layerBranchId, |
| 221 | dependency=depLayerBranch.layer_id) | 221 | dependency=depLayerBranch.layer_id) |
| 222 | 222 | ||
| 223 | logger.debug(1, '%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name)) | 223 | logger.debug('%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name)) |
| 224 | index.add_element("layerDependencies", [layerDependency]) | 224 | index.add_element("layerDependencies", [layerDependency]) |
| 225 | 225 | ||
| 226 | return layerDependencyId | 226 | return layerDependencyId |
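One thing these conversions leave untouched is that messages are still pre-formatted with `%` before the call. A hedged aside on how standard `logging` can defer that work (not something this patch attempts; names and values are illustrative):

```python
import logging

logger = logging.getLogger("BitBake.layerindexlib.demo")

layer_name, dep_name = "meta-example", "openembedded-core"  # hypothetical values

# Eager %-formatting, as at these call sites, builds the string even
# when DEBUG records would be discarded:
logger.debug('%s requires %s' % (layer_name, dep_name))

# Deferred formatting lets logging skip the work unless the record is emitted:
logger.debug('%s requires %s', layer_name, dep_name)

# For genuinely expensive messages, an explicit guard also works:
if logger.isEnabledFor(logging.DEBUG):
    logger.debug('%s requires %s' % (layer_name, dep_name))
```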
diff --git a/bitbake/lib/layerindexlib/restapi.py b/bitbake/lib/layerindexlib/restapi.py index 7023f42f20..26a1c9674e 100644 --- a/bitbake/lib/layerindexlib/restapi.py +++ b/bitbake/lib/layerindexlib/restapi.py | |||
| @@ -82,7 +82,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 82 | 82 | ||
| 83 | 83 | ||
| 84 | def load_cache(path, index, branches=[]): | 84 | def load_cache(path, index, branches=[]): |
| 85 | logger.debug(1, 'Loading json file %s' % path) | 85 | logger.debug('Loading json file %s' % path) |
| 86 | with open(path, 'rt', encoding='utf-8') as f: | 86 | with open(path, 'rt', encoding='utf-8') as f: |
| 87 | pindex = json.load(f) | 87 | pindex = json.load(f) |
| 88 | 88 | ||
| @@ -102,7 +102,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 102 | if newpBranch: | 102 | if newpBranch: |
| 103 | index.add_raw_element('branches', layerindexlib.Branch, newpBranch) | 103 | index.add_raw_element('branches', layerindexlib.Branch, newpBranch) |
| 104 | else: | 104 | else: |
| 105 | logger.debug(1, 'No matching branches (%s) in index file(s)' % branches) | 105 | logger.debug('No matching branches (%s) in index file(s)' % branches) |
| 106 | # No matching branches.. return nothing... | 106 | # No matching branches.. return nothing... |
| 107 | return | 107 | return |
| 108 | 108 | ||
| @@ -120,7 +120,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 120 | load_cache(up.path, index, branches) | 120 | load_cache(up.path, index, branches) |
| 121 | return index | 121 | return index |
| 122 | 122 | ||
| 123 | logger.debug(1, 'Loading from dir %s...' % (up.path)) | 123 | logger.debug('Loading from dir %s...' % (up.path)) |
| 124 | for (dirpath, _, filenames) in os.walk(up.path): | 124 | for (dirpath, _, filenames) in os.walk(up.path): |
| 125 | for filename in filenames: | 125 | for filename in filenames: |
| 126 | if not filename.endswith('.json'): | 126 | if not filename.endswith('.json'): |
| @@ -144,7 +144,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 144 | def _get_json_response(apiurl=None, username=None, password=None, retry=True): | 144 | def _get_json_response(apiurl=None, username=None, password=None, retry=True): |
| 145 | assert apiurl is not None | 145 | assert apiurl is not None |
| 146 | 146 | ||
| 147 | logger.debug(1, "fetching %s" % apiurl) | 147 | logger.debug("fetching %s" % apiurl) |
| 148 | 148 | ||
| 149 | up = urlparse(apiurl) | 149 | up = urlparse(apiurl) |
| 150 | 150 | ||
| @@ -163,9 +163,9 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 163 | parsed = json.loads(res.read().decode('utf-8')) | 163 | parsed = json.loads(res.read().decode('utf-8')) |
| 164 | except ConnectionResetError: | 164 | except ConnectionResetError: |
| 165 | if retry: | 165 | if retry: |
| 166 | logger.debug(1, "%s: Connection reset by peer. Retrying..." % url) | 166 | logger.debug("%s: Connection reset by peer. Retrying..." % url) |
| 167 | parsed = _get_json_response(apiurl=up_stripped.geturl(), username=username, password=password, retry=False) | 167 | parsed = _get_json_response(apiurl=up_stripped.geturl(), username=username, password=password, retry=False) |
| 168 | logger.debug(1, "%s: retry successful.") | 168 | logger.debug("%s: retry successful." % url) |
| 169 | else: | 169 | else: |
| 170 | raise layerindexlib.LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' % apiurl) | 170 | raise layerindexlib.LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' % apiurl) |
| 171 | 171 | ||
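The hunk above sits inside the one piece of control flow worth calling out: a single retry on `ConnectionResetError`, logging before and after the recursive attempt. A simplified, self-contained sketch of that shape (the real function also strips credentials from the URL and handles authentication):

```python
import json
import logging
from urllib.request import urlopen

logger = logging.getLogger("BitBake.layerindexlib.demo")

def get_json(url, retry=True):
    """Fetch and decode a JSON document, retrying once on a reset connection."""
    try:
        with urlopen(url) as res:
            return json.loads(res.read().decode('utf-8'))
    except ConnectionResetError:
        if not retry:
            raise
        logger.debug("%s: Connection reset by peer. Retrying...", url)
        parsed = get_json(url, retry=False)
        logger.debug("%s: retry successful.", url)
        return parsed
```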
| @@ -207,25 +207,25 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 207 | if "*" not in branches: | 207 | if "*" not in branches: |
| 208 | filter = "?filter=name:%s" % "OR".join(branches) | 208 | filter = "?filter=name:%s" % "OR".join(branches) |
| 209 | 209 | ||
| 210 | logger.debug(1, "Loading %s from %s" % (branches, index.apilinks['branches'])) | 210 | logger.debug("Loading %s from %s" % (branches, index.apilinks['branches'])) |
| 211 | 211 | ||
| 212 | # The link won't include username/password, so pull it from the original url | 212 | # The link won't include username/password, so pull it from the original url |
| 213 | pindex['branches'] = _get_json_response(index.apilinks['branches'] + filter, | 213 | pindex['branches'] = _get_json_response(index.apilinks['branches'] + filter, |
| 214 | username=up.username, password=up.password) | 214 | username=up.username, password=up.password) |
| 215 | if not pindex['branches']: | 215 | if not pindex['branches']: |
| 216 | logger.debug(1, "No valid branches (%s) found at url %s." % (branch, url)) | 216 | logger.debug("No valid branches (%s) found at url %s." % (branches, url)) |
| 217 | return index | 217 | return index |
| 218 | index.add_raw_element("branches", layerindexlib.Branch, pindex['branches']) | 218 | index.add_raw_element("branches", layerindexlib.Branch, pindex['branches']) |
| 219 | 219 | ||
| 220 | # Load all of the layerItems (these can not be easily filtered) | 220 | # Load all of the layerItems (these can not be easily filtered) |
| 221 | logger.debug(1, "Loading %s from %s" % ('layerItems', index.apilinks['layerItems'])) | 221 | logger.debug("Loading %s from %s" % ('layerItems', index.apilinks['layerItems'])) |
| 222 | 222 | ||
| 223 | 223 | ||
| 224 | # The link won't include username/password, so pull it from the original url | 224 | # The link won't include username/password, so pull it from the original url |
| 225 | pindex['layerItems'] = _get_json_response(index.apilinks['layerItems'], | 225 | pindex['layerItems'] = _get_json_response(index.apilinks['layerItems'], |
| 226 | username=up.username, password=up.password) | 226 | username=up.username, password=up.password) |
| 227 | if not pindex['layerItems']: | 227 | if not pindex['layerItems']: |
| 228 | logger.debug(1, "No layers were found at url %s." % (url)) | 228 | logger.debug("No layers were found at url %s." % (url)) |
| 229 | return index | 229 | return index |
| 230 | index.add_raw_element("layerItems", layerindexlib.LayerItem, pindex['layerItems']) | 230 | index.add_raw_element("layerItems", layerindexlib.LayerItem, pindex['layerItems']) |
| 231 | 231 | ||
| @@ -235,13 +235,13 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 235 | for branch in index.branches: | 235 | for branch in index.branches: |
| 236 | filter = "?filter=branch__name:%s" % index.branches[branch].name | 236 | filter = "?filter=branch__name:%s" % index.branches[branch].name |
| 237 | 237 | ||
| 238 | logger.debug(1, "Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches'])) | 238 | logger.debug("Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches'])) |
| 239 | 239 | ||
| 240 | # The link won't include username/password, so pull it from the original url | 240 | # The link won't include username/password, so pull it from the original url |
| 241 | pindex['layerBranches'] = _get_json_response(index.apilinks['layerBranches'] + filter, | 241 | pindex['layerBranches'] = _get_json_response(index.apilinks['layerBranches'] + filter, |
| 242 | username=up.username, password=up.password) | 242 | username=up.username, password=up.password) |
| 243 | if not pindex['layerBranches']: | 243 | if not pindex['layerBranches']: |
| 244 | logger.debug(1, "No valid layer branches (%s) found at url %s." % (branches or "*", url)) | 244 | logger.debug("No valid layer branches (%s) found at url %s." % (branches or "*", url)) |
| 245 | return index | 245 | return index |
| 246 | index.add_raw_element("layerBranches", layerindexlib.LayerBranch, pindex['layerBranches']) | 246 | index.add_raw_element("layerBranches", layerindexlib.LayerBranch, pindex['layerBranches']) |
| 247 | 247 | ||
| @@ -256,7 +256,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 256 | ("distros", layerindexlib.Distro)]: | 256 | ("distros", layerindexlib.Distro)]: |
| 257 | if lName not in load: | 257 | if lName not in load: |
| 258 | continue | 258 | continue |
| 259 | logger.debug(1, "Loading %s from %s" % (lName, index.apilinks[lName])) | 259 | logger.debug("Loading %s from %s" % (lName, index.apilinks[lName])) |
| 260 | 260 | ||
| 261 | # The link won't include username/password, so pull it from the original url | 261 | # The link won't include username/password, so pull it from the original url |
| 262 | pindex[lName] = _get_json_response(index.apilinks[lName] + filter, | 262 | pindex[lName] = _get_json_response(index.apilinks[lName] + filter, |
| @@ -283,7 +283,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 283 | if up.scheme != 'file': | 283 | if up.scheme != 'file': |
| 284 | raise layerindexlib.plugin.LayerIndexPluginUrlError(self.type, url) | 284 | raise layerindexlib.plugin.LayerIndexPluginUrlError(self.type, url) |
| 285 | 285 | ||
| 286 | logger.debug(1, "Storing to %s..." % up.path) | 286 | logger.debug("Storing to %s..." % up.path) |
| 287 | 287 | ||
| 288 | try: | 288 | try: |
| 289 | layerbranches = index.layerBranches | 289 | layerbranches = index.layerBranches |
| @@ -299,12 +299,12 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin): | |||
| 299 | if getattr(index, objects)[obj].layerbranch_id == layerbranchid: | 299 | if getattr(index, objects)[obj].layerbranch_id == layerbranchid: |
| 300 | filtered.append(getattr(index, objects)[obj]._data) | 300 | filtered.append(getattr(index, objects)[obj]._data) |
| 301 | except AttributeError: | 301 | except AttributeError: |
| 302 | logger.debug(1, 'No obj.layerbranch_id: %s' % objects) | 302 | logger.debug('No obj.layerbranch_id: %s' % objects) |
| 303 | # No simple filter method, just include it... | 303 | # No simple filter method, just include it... |
| 304 | try: | 304 | try: |
| 305 | filtered.append(getattr(index, objects)[obj]._data) | 305 | filtered.append(getattr(index, objects)[obj]._data) |
| 306 | except AttributeError: | 306 | except AttributeError: |
| 307 | logger.debug(1, 'No obj._data: %s %s' % (objects, type(obj))) | 307 | logger.debug('No obj._data: %s %s' % (objects, type(obj))) |
| 308 | filtered.append(obj) | 308 | filtered.append(obj) |
| 309 | return filtered | 309 | return filtered |
| 310 | 310 | ||
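The final restapi.py hunks touch a duck-typed filter: objects carrying a `layerbranch_id` are matched against it, and anything else falls through two `AttributeError` handlers. A condensed sketch of that fallback chain (function and variable names are illustrative):

```python
import logging

logger = logging.getLogger("BitBake.layerindexlib.demo")

def filter_by_layerbranch(items, layerbranch_id):
    """Keep raw data for items tied to one layerbranch; pass everything else through."""
    filtered = []
    for obj in items:
        try:
            if obj.layerbranch_id == layerbranch_id:
                filtered.append(obj._data)
        except AttributeError:
            logger.debug('No obj.layerbranch_id: %s', type(obj))
            # No simple filter method, just include it...
            try:
                filtered.append(obj._data)
            except AttributeError:
                logger.debug('No obj._data: %s', type(obj))
                filtered.append(obj)
    return filtered
```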
diff --git a/bitbake/lib/layerindexlib/tests/cooker.py b/bitbake/lib/layerindexlib/tests/cooker.py index 1d0685e099..5ddf89aa21 100644 --- a/bitbake/lib/layerindexlib/tests/cooker.py +++ b/bitbake/lib/layerindexlib/tests/cooker.py | |||
| @@ -72,7 +72,7 @@ class LayerIndexCookerTest(LayersTest): | |||
| 72 | 72 | ||
| 73 | def test_find_collection(self): | 73 | def test_find_collection(self): |
| 74 | def _check(collection, expected): | 74 | def _check(collection, expected): |
| 75 | self.logger.debug(1, "Looking for collection %s..." % collection) | 75 | self.logger.debug("Looking for collection %s..." % collection) |
| 76 | result = self.layerindex.find_collection(collection) | 76 | result = self.layerindex.find_collection(collection) |
| 77 | if expected: | 77 | if expected: |
| 78 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) | 78 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) |
| @@ -91,7 +91,7 @@ class LayerIndexCookerTest(LayersTest): | |||
| 91 | 91 | ||
| 92 | def test_find_layerbranch(self): | 92 | def test_find_layerbranch(self): |
| 93 | def _check(name, expected): | 93 | def _check(name, expected): |
| 94 | self.logger.debug(1, "Looking for layerbranch %s..." % name) | 94 | self.logger.debug("Looking for layerbranch %s..." % name) |
| 95 | result = self.layerindex.find_layerbranch(name) | 95 | result = self.layerindex.find_layerbranch(name) |
| 96 | if expected: | 96 | if expected: |
| 97 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % name) | 97 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % name) |
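Both test modules share the same inner `_check(arg, expected)` idiom that these hunks touch: log the lookup, run the query, then assert presence or absence. A minimal stand-in showing the shape (the dict fixture replaces the real layer index):

```python
import logging
import unittest

logger = logging.getLogger("BitBake.layerindexlib.demo")

class FindCollectionDemo(unittest.TestCase):
    """Illustrative stand-in for the test_find_collection pattern."""
    index = {"core": "openembedded-core"}  # hypothetical fixture

    def test_find_collection(self):
        def _check(collection, expected):
            logger.debug("Looking for collection %s...", collection)
            result = self.index.get(collection)
            if expected:
                self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection)
            else:
                self.assertIsNone(result, msg="Found %s when it should not be there" % collection)

        _check("core", True)
        _check("no-such-collection", False)

if __name__ == "__main__":
    unittest.main()
```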
diff --git a/bitbake/lib/layerindexlib/tests/restapi.py b/bitbake/lib/layerindexlib/tests/restapi.py index 4646d01f9f..33b5c1c4c8 100644 --- a/bitbake/lib/layerindexlib/tests/restapi.py +++ b/bitbake/lib/layerindexlib/tests/restapi.py | |||
| @@ -57,11 +57,11 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
| 57 | type in self.layerindex.indexes[0].config['local']: | 57 | type in self.layerindex.indexes[0].config['local']: |
| 58 | continue | 58 | continue |
| 59 | for id in getattr(self.layerindex.indexes[0], type): | 59 | for id in getattr(self.layerindex.indexes[0], type): |
| 60 | self.logger.debug(1, "type %s" % (type)) | 60 | self.logger.debug("type %s" % (type)) |
| 61 | 61 | ||
| 62 | self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number not in reloaded index") | 62 | self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number not in reloaded index") |
| 63 | 63 | ||
| 64 | self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id])) | 64 | self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id])) |
| 65 | 65 | ||
| 66 | self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="Reloaded contents different") | 66 | self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="Reloaded contents different") |
| 67 | 67 | ||
| @@ -80,11 +80,11 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
| 80 | type in self.layerindex.indexes[0].config['local']: | 80 | type in self.layerindex.indexes[0].config['local']: |
| 81 | continue | 81 | continue |
| 82 | for id in getattr(self.layerindex.indexes[0], type): | 82 | for id in getattr(self.layerindex.indexes[0], type): |
| 83 | self.logger.debug(1, "type %s" % (type)) | 83 | self.logger.debug("type %s" % (type)) |
| 84 | 84 | ||
| 85 | self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number missing from reloaded data") | 85 | self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number missing from reloaded data") |
| 86 | 86 | ||
| 87 | self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id])) | 87 | self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id])) |
| 88 | 88 | ||
| 89 | self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="reloaded data does not match original") | 89 | self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="reloaded data does not match original") |
| 90 | 90 | ||
| @@ -111,14 +111,14 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
| 111 | if dep.layer.name == 'meta-python': | 111 | if dep.layer.name == 'meta-python': |
| 112 | break | 112 | break |
| 113 | else: | 113 | else: |
| 114 | self.logger.debug(1, "meta-python was not found") | 114 | self.logger.debug("meta-python was not found") |
| 115 | raise self.failureException | 115 | raise self.failureException |
| 116 | 116 | ||
| 117 | # Only check the first element... | 117 | # Only check the first element... |
| 118 | break | 118 | break |
| 119 | else: | 119 | else: |
| 120 | # Empty list, this is bad. | 120 | # Empty list, this is bad. |
| 121 | self.logger.debug(1, "Empty list of dependencies") | 121 | self.logger.debug("Empty list of dependencies") |
| 122 | self.assertIsNotNone(first, msg="Empty list of dependencies") | 122 | self.assertIsNotNone(first, msg="Empty list of dependencies") |
| 123 | 123 | ||
| 124 | # Last dep should be the requested item | 124 | # Last dep should be the requested item |
| @@ -128,7 +128,7 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
| 128 | @skipIfNoNetwork() | 128 | @skipIfNoNetwork() |
| 129 | def test_find_collection(self): | 129 | def test_find_collection(self): |
| 130 | def _check(collection, expected): | 130 | def _check(collection, expected): |
| 131 | self.logger.debug(1, "Looking for collection %s..." % collection) | 131 | self.logger.debug("Looking for collection %s..." % collection) |
| 132 | result = self.layerindex.find_collection(collection) | 132 | result = self.layerindex.find_collection(collection) |
| 133 | if expected: | 133 | if expected: |
| 134 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) | 134 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection) |
| @@ -148,11 +148,11 @@ class LayerIndexWebRestApiTest(LayersTest): | |||
| 148 | @skipIfNoNetwork() | 148 | @skipIfNoNetwork() |
| 149 | def test_find_layerbranch(self): | 149 | def test_find_layerbranch(self): |
| 150 | def _check(name, expected): | 150 | def _check(name, expected): |
| 151 | self.logger.debug(1, "Looking for layerbranch %s..." % name) | 151 | self.logger.debug("Looking for layerbranch %s..." % name) |
| 152 | 152 | ||
| 153 | for index in self.layerindex.indexes: | 153 | for index in self.layerindex.indexes: |
| 154 | for layerbranchid in index.layerBranches: | 154 | for layerbranchid in index.layerBranches: |
| 155 | self.logger.debug(1, "Present: %s" % index.layerBranches[layerbranchid].layer.name) | 155 | self.logger.debug("Present: %s" % index.layerBranches[layerbranchid].layer.name) |
| 156 | result = self.layerindex.find_layerbranch(name) | 156 | result = self.layerindex.find_layerbranch(name) |
| 157 | if expected: | 157 | if expected: |
| 158 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % name) | 158 | self.assertIsNotNone(result, msg="Did not find %s when it should be there" % name) |
