diff options
author | Joshua Watt <JPEWhacker@gmail.com> | 2021-02-09 09:50:21 -0600 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2021-02-10 23:48:16 +0000 |
commit | 75f87db413f3659fee18eff389b7b339b01cce15 (patch) | |
tree | e733135549b516c72c4f34172b6bbf865377fc76 /bitbake/lib/bb/fetch2 | |
parent | 7283a0b3b6ca49d0d2e13593333a580ef10439a8 (diff) | |
download | poky-75f87db413f3659fee18eff389b7b339b01cce15.tar.gz |
bitbake: logging: Make bitbake logger compatible with python logger
The bitbake logger overrode the definition of the debug() logging call
to include a debug level, but this causes problems with code that may
be using standard python logging, since the extra argument is
interpreted differently.
Instead, change the bitbake logger's debug() call to match the python
logger call and add a debug2() and debug3() API to replace calls that
were logging to a different debug level.
[RP: Small fix to ensure bb.debug calls bbdebug()]
(Bitbake rev: f68682a79d83e6399eb403f30a1f113516575f51)
Signed-off-by: Joshua Watt <JPEWhacker@gmail.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 38 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/bzr.py | 8 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/clearcase.py | 2 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/cvs.py | 4 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/gitsm.py | 4 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/hg.py | 16 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/local.py | 4 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/osc.py | 6 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/perforce.py | 10 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/repo.py | 2 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/svn.py | 6 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/wget.py | 6 |
12 files changed, 53 insertions, 53 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index ee3d7b1672..19169d780f 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
428 | uri_decoded = list(decodeurl(ud.url)) | 428 | uri_decoded = list(decodeurl(ud.url)) |
429 | uri_find_decoded = list(decodeurl(uri_find)) | 429 | uri_find_decoded = list(decodeurl(uri_find)) |
430 | uri_replace_decoded = list(decodeurl(uri_replace)) | 430 | uri_replace_decoded = list(decodeurl(uri_replace)) |
431 | logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) | 431 | logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) |
432 | result_decoded = ['', '', '', '', '', {}] | 432 | result_decoded = ['', '', '', '', '', {}] |
433 | for loc, i in enumerate(uri_find_decoded): | 433 | for loc, i in enumerate(uri_find_decoded): |
434 | result_decoded[loc] = uri_decoded[loc] | 434 | result_decoded[loc] = uri_decoded[loc] |
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): | |||
474 | result = encodeurl(result_decoded) | 474 | result = encodeurl(result_decoded) |
475 | if result == ud.url: | 475 | if result == ud.url: |
476 | return None | 476 | return None |
477 | logger.debug(2, "For url %s returning %s" % (ud.url, result)) | 477 | logger.debug2("For url %s returning %s" % (ud.url, result)) |
478 | return result | 478 | return result |
479 | 479 | ||
480 | methods = [] | 480 | methods = [] |
@@ -499,9 +499,9 @@ def fetcher_init(d): | |||
499 | # When to drop SCM head revisions controlled by user policy | 499 | # When to drop SCM head revisions controlled by user policy |
500 | srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear" | 500 | srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear" |
501 | if srcrev_policy == "cache": | 501 | if srcrev_policy == "cache": |
502 | logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) | 502 | logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) |
503 | elif srcrev_policy == "clear": | 503 | elif srcrev_policy == "clear": |
504 | logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) | 504 | logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) |
505 | revs.clear() | 505 | revs.clear() |
506 | else: | 506 | else: |
507 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) | 507 | raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) |
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): | |||
857 | cmd = 'export PSEUDO_DISABLED=1; ' + cmd | 857 | cmd = 'export PSEUDO_DISABLED=1; ' + cmd |
858 | 858 | ||
859 | if workdir: | 859 | if workdir: |
860 | logger.debug(1, "Running '%s' in %s" % (cmd, workdir)) | 860 | logger.debug("Running '%s' in %s" % (cmd, workdir)) |
861 | else: | 861 | else: |
862 | logger.debug(1, "Running %s", cmd) | 862 | logger.debug("Running %s", cmd) |
863 | 863 | ||
864 | success = False | 864 | success = False |
865 | error_message = "" | 865 | error_message = "" |
@@ -900,7 +900,7 @@ def check_network_access(d, info, url): | |||
900 | elif not trusted_network(d, url): | 900 | elif not trusted_network(d, url): |
901 | raise UntrustedUrl(url, info) | 901 | raise UntrustedUrl(url, info) |
902 | else: | 902 | else: |
903 | logger.debug(1, "Fetcher accessed the network with the command %s" % info) | 903 | logger.debug("Fetcher accessed the network with the command %s" % info) |
904 | 904 | ||
905 | def build_mirroruris(origud, mirrors, ld): | 905 | def build_mirroruris(origud, mirrors, ld): |
906 | uris = [] | 906 | uris = [] |
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld): | |||
926 | continue | 926 | continue |
927 | 927 | ||
928 | if not trusted_network(ld, newuri): | 928 | if not trusted_network(ld, newuri): |
929 | logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri)) | 929 | logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri)) |
930 | continue | 930 | continue |
931 | 931 | ||
932 | # Create a local copy of the mirrors minus the current line | 932 | # Create a local copy of the mirrors minus the current line |
@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld): | |||
939 | newud = FetchData(newuri, ld) | 939 | newud = FetchData(newuri, ld) |
940 | newud.setup_localpath(ld) | 940 | newud.setup_localpath(ld) |
941 | except bb.fetch2.BBFetchException as e: | 941 | except bb.fetch2.BBFetchException as e: |
942 | logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) | 942 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) |
943 | logger.debug(1, str(e)) | 943 | logger.debug(str(e)) |
944 | try: | 944 | try: |
945 | # setup_localpath of file:// urls may fail, we should still see | 945 | # setup_localpath of file:// urls may fail, we should still see |
946 | # if mirrors of the url exist | 946 | # if mirrors of the url exist |
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): | |||
1043 | elif isinstance(e, NoChecksumError): | 1043 | elif isinstance(e, NoChecksumError): |
1044 | raise | 1044 | raise |
1045 | else: | 1045 | else: |
1046 | logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) | 1046 | logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) |
1047 | logger.debug(1, str(e)) | 1047 | logger.debug(str(e)) |
1048 | try: | 1048 | try: |
1049 | ud.method.clean(ud, ld) | 1049 | ud.method.clean(ud, ld) |
1050 | except UnboundLocalError: | 1050 | except UnboundLocalError: |
@@ -1688,7 +1688,7 @@ class Fetch(object): | |||
1688 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): | 1688 | if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): |
1689 | done = True | 1689 | done = True |
1690 | elif m.try_premirror(ud, self.d): | 1690 | elif m.try_premirror(ud, self.d): |
1691 | logger.debug(1, "Trying PREMIRRORS") | 1691 | logger.debug("Trying PREMIRRORS") |
1692 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) | 1692 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) |
1693 | done = m.try_mirrors(self, ud, self.d, mirrors) | 1693 | done = m.try_mirrors(self, ud, self.d, mirrors) |
1694 | if done: | 1694 | if done: |
@@ -1698,7 +1698,7 @@ class Fetch(object): | |||
1698 | m.update_donestamp(ud, self.d) | 1698 | m.update_donestamp(ud, self.d) |
1699 | except ChecksumError as e: | 1699 | except ChecksumError as e: |
1700 | logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u) | 1700 | logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u) |
1701 | logger.debug(1, str(e)) | 1701 | logger.debug(str(e)) |
1702 | done = False | 1702 | done = False |
1703 | 1703 | ||
1704 | if premirroronly: | 1704 | if premirroronly: |
@@ -1710,7 +1710,7 @@ class Fetch(object): | |||
1710 | try: | 1710 | try: |
1711 | if not trusted_network(self.d, ud.url): | 1711 | if not trusted_network(self.d, ud.url): |
1712 | raise UntrustedUrl(ud.url) | 1712 | raise UntrustedUrl(ud.url) |
1713 | logger.debug(1, "Trying Upstream") | 1713 | logger.debug("Trying Upstream") |
1714 | m.download(ud, self.d) | 1714 | m.download(ud, self.d) |
1715 | if hasattr(m, "build_mirror_data"): | 1715 | if hasattr(m, "build_mirror_data"): |
1716 | m.build_mirror_data(ud, self.d) | 1716 | m.build_mirror_data(ud, self.d) |
@@ -1725,19 +1725,19 @@ class Fetch(object): | |||
1725 | except BBFetchException as e: | 1725 | except BBFetchException as e: |
1726 | if isinstance(e, ChecksumError): | 1726 | if isinstance(e, ChecksumError): |
1727 | logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u) | 1727 | logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u) |
1728 | logger.debug(1, str(e)) | 1728 | logger.debug(str(e)) |
1729 | if os.path.exists(ud.localpath): | 1729 | if os.path.exists(ud.localpath): |
1730 | rename_bad_checksum(ud, e.checksum) | 1730 | rename_bad_checksum(ud, e.checksum) |
1731 | elif isinstance(e, NoChecksumError): | 1731 | elif isinstance(e, NoChecksumError): |
1732 | raise | 1732 | raise |
1733 | else: | 1733 | else: |
1734 | logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u) | 1734 | logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u) |
1735 | logger.debug(1, str(e)) | 1735 | logger.debug(str(e)) |
1736 | firsterr = e | 1736 | firsterr = e |
1737 | # Remove any incomplete fetch | 1737 | # Remove any incomplete fetch |
1738 | if not verified_stamp: | 1738 | if not verified_stamp: |
1739 | m.clean(ud, self.d) | 1739 | m.clean(ud, self.d) |
1740 | logger.debug(1, "Trying MIRRORS") | 1740 | logger.debug("Trying MIRRORS") |
1741 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) | 1741 | mirrors = mirror_from_string(self.d.getVar('MIRRORS')) |
1742 | done = m.try_mirrors(self, ud, self.d, mirrors) | 1742 | done = m.try_mirrors(self, ud, self.d, mirrors) |
1743 | 1743 | ||
@@ -1774,7 +1774,7 @@ class Fetch(object): | |||
1774 | ud = self.ud[u] | 1774 | ud = self.ud[u] |
1775 | ud.setup_localpath(self.d) | 1775 | ud.setup_localpath(self.d) |
1776 | m = ud.method | 1776 | m = ud.method |
1777 | logger.debug(1, "Testing URL %s", u) | 1777 | logger.debug("Testing URL %s", u) |
1778 | # First try checking uri, u, from PREMIRRORS | 1778 | # First try checking uri, u, from PREMIRRORS |
1779 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) | 1779 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) |
1780 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) | 1780 | ret = m.try_mirrors(self, ud, self.d, mirrors, True) |
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py index 566ace9f05..fc558f50b0 100644 --- a/bitbake/lib/bb/fetch2/bzr.py +++ b/bitbake/lib/bb/fetch2/bzr.py | |||
@@ -74,16 +74,16 @@ class Bzr(FetchMethod): | |||
74 | 74 | ||
75 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): | 75 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): |
76 | bzrcmd = self._buildbzrcommand(ud, d, "update") | 76 | bzrcmd = self._buildbzrcommand(ud, d, "update") |
77 | logger.debug(1, "BZR Update %s", ud.url) | 77 | logger.debug("BZR Update %s", ud.url) |
78 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) | 78 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) |
79 | runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path))) | 79 | runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path))) |
80 | else: | 80 | else: |
81 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) | 81 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) |
82 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") | 82 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") |
83 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) | 83 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) |
84 | logger.debug(1, "BZR Checkout %s", ud.url) | 84 | logger.debug("BZR Checkout %s", ud.url) |
85 | bb.utils.mkdirhier(ud.pkgdir) | 85 | bb.utils.mkdirhier(ud.pkgdir) |
86 | logger.debug(1, "Running %s", bzrcmd) | 86 | logger.debug("Running %s", bzrcmd) |
87 | runfetchcmd(bzrcmd, d, workdir=ud.pkgdir) | 87 | runfetchcmd(bzrcmd, d, workdir=ud.pkgdir) |
88 | 88 | ||
89 | scmdata = ud.parm.get("scmdata", "") | 89 | scmdata = ud.parm.get("scmdata", "") |
@@ -109,7 +109,7 @@ class Bzr(FetchMethod): | |||
109 | """ | 109 | """ |
110 | Return the latest upstream revision number | 110 | Return the latest upstream revision number |
111 | """ | 111 | """ |
112 | logger.debug(2, "BZR fetcher hitting network for %s", ud.url) | 112 | logger.debug2("BZR fetcher hitting network for %s", ud.url) |
113 | 113 | ||
114 | bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) | 114 | bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) |
115 | 115 | ||
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py index 49d7ae1b09..1a9c863769 100644 --- a/bitbake/lib/bb/fetch2/clearcase.py +++ b/bitbake/lib/bb/fetch2/clearcase.py | |||
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod): | |||
70 | return ud.type in ['ccrc'] | 70 | return ud.type in ['ccrc'] |
71 | 71 | ||
72 | def debug(self, msg): | 72 | def debug(self, msg): |
73 | logger.debug(1, "ClearCase: %s", msg) | 73 | logger.debug("ClearCase: %s", msg) |
74 | 74 | ||
75 | def urldata_init(self, ud, d): | 75 | def urldata_init(self, ud, d): |
76 | """ | 76 | """ |
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py index 22abdef792..01de5ff4ca 100644 --- a/bitbake/lib/bb/fetch2/cvs.py +++ b/bitbake/lib/bb/fetch2/cvs.py | |||
@@ -109,7 +109,7 @@ class Cvs(FetchMethod): | |||
109 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) | 109 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) |
110 | 110 | ||
111 | # create module directory | 111 | # create module directory |
112 | logger.debug(2, "Fetch: checking for module directory") | 112 | logger.debug2("Fetch: checking for module directory") |
113 | moddir = os.path.join(ud.pkgdir, localdir) | 113 | moddir = os.path.join(ud.pkgdir, localdir) |
114 | workdir = None | 114 | workdir = None |
115 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): | 115 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): |
@@ -123,7 +123,7 @@ class Cvs(FetchMethod): | |||
123 | # check out sources there | 123 | # check out sources there |
124 | bb.utils.mkdirhier(ud.pkgdir) | 124 | bb.utils.mkdirhier(ud.pkgdir) |
125 | workdir = ud.pkgdir | 125 | workdir = ud.pkgdir |
126 | logger.debug(1, "Running %s", cvscmd) | 126 | logger.debug("Running %s", cvscmd) |
127 | bb.fetch2.check_network_access(d, cvscmd, ud.url) | 127 | bb.fetch2.check_network_access(d, cvscmd, ud.url) |
128 | cmd = cvscmd | 128 | cmd = cvscmd |
129 | 129 | ||
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py index d6e5c5c050..a4527bf364 100644 --- a/bitbake/lib/bb/fetch2/gitsm.py +++ b/bitbake/lib/bb/fetch2/gitsm.py | |||
@@ -78,7 +78,7 @@ class GitSM(Git): | |||
78 | module_hash = "" | 78 | module_hash = "" |
79 | 79 | ||
80 | if not module_hash: | 80 | if not module_hash: |
81 | logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m) | 81 | logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) |
82 | continue | 82 | continue |
83 | 83 | ||
84 | submodules.append(m) | 84 | submodules.append(m) |
@@ -179,7 +179,7 @@ class GitSM(Git): | |||
179 | (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) | 179 | (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir) |
180 | 180 | ||
181 | if len(need_update_list) > 0: | 181 | if len(need_update_list) > 0: |
182 | logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) | 182 | logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) |
183 | return True | 183 | return True |
184 | 184 | ||
185 | return False | 185 | return False |
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py index 8f503701ed..063e13008a 100644 --- a/bitbake/lib/bb/fetch2/hg.py +++ b/bitbake/lib/bb/fetch2/hg.py | |||
@@ -150,7 +150,7 @@ class Hg(FetchMethod): | |||
150 | def download(self, ud, d): | 150 | def download(self, ud, d): |
151 | """Fetch url""" | 151 | """Fetch url""" |
152 | 152 | ||
153 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 153 | logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") |
154 | 154 | ||
155 | # If the checkout doesn't exist and the mirror tarball does, extract it | 155 | # If the checkout doesn't exist and the mirror tarball does, extract it |
156 | if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror): | 156 | if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror): |
@@ -160,7 +160,7 @@ class Hg(FetchMethod): | |||
160 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): | 160 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): |
161 | # Found the source, check whether need pull | 161 | # Found the source, check whether need pull |
162 | updatecmd = self._buildhgcommand(ud, d, "update") | 162 | updatecmd = self._buildhgcommand(ud, d, "update") |
163 | logger.debug(1, "Running %s", updatecmd) | 163 | logger.debug("Running %s", updatecmd) |
164 | try: | 164 | try: |
165 | runfetchcmd(updatecmd, d, workdir=ud.moddir) | 165 | runfetchcmd(updatecmd, d, workdir=ud.moddir) |
166 | except bb.fetch2.FetchError: | 166 | except bb.fetch2.FetchError: |
@@ -168,7 +168,7 @@ class Hg(FetchMethod): | |||
168 | pullcmd = self._buildhgcommand(ud, d, "pull") | 168 | pullcmd = self._buildhgcommand(ud, d, "pull") |
169 | logger.info("Pulling " + ud.url) | 169 | logger.info("Pulling " + ud.url) |
170 | # update sources there | 170 | # update sources there |
171 | logger.debug(1, "Running %s", pullcmd) | 171 | logger.debug("Running %s", pullcmd) |
172 | bb.fetch2.check_network_access(d, pullcmd, ud.url) | 172 | bb.fetch2.check_network_access(d, pullcmd, ud.url) |
173 | runfetchcmd(pullcmd, d, workdir=ud.moddir) | 173 | runfetchcmd(pullcmd, d, workdir=ud.moddir) |
174 | try: | 174 | try: |
@@ -183,14 +183,14 @@ class Hg(FetchMethod): | |||
183 | logger.info("Fetch " + ud.url) | 183 | logger.info("Fetch " + ud.url) |
184 | # check out sources there | 184 | # check out sources there |
185 | bb.utils.mkdirhier(ud.pkgdir) | 185 | bb.utils.mkdirhier(ud.pkgdir) |
186 | logger.debug(1, "Running %s", fetchcmd) | 186 | logger.debug("Running %s", fetchcmd) |
187 | bb.fetch2.check_network_access(d, fetchcmd, ud.url) | 187 | bb.fetch2.check_network_access(d, fetchcmd, ud.url) |
188 | runfetchcmd(fetchcmd, d, workdir=ud.pkgdir) | 188 | runfetchcmd(fetchcmd, d, workdir=ud.pkgdir) |
189 | 189 | ||
190 | # Even when we clone (fetch), we still need to update as hg's clone | 190 | # Even when we clone (fetch), we still need to update as hg's clone |
191 | # won't checkout the specified revision if its on a branch | 191 | # won't checkout the specified revision if its on a branch |
192 | updatecmd = self._buildhgcommand(ud, d, "update") | 192 | updatecmd = self._buildhgcommand(ud, d, "update") |
193 | logger.debug(1, "Running %s", updatecmd) | 193 | logger.debug("Running %s", updatecmd) |
194 | runfetchcmd(updatecmd, d, workdir=ud.moddir) | 194 | runfetchcmd(updatecmd, d, workdir=ud.moddir) |
195 | 195 | ||
196 | def clean(self, ud, d): | 196 | def clean(self, ud, d): |
@@ -247,9 +247,9 @@ class Hg(FetchMethod): | |||
247 | if scmdata != "nokeep": | 247 | if scmdata != "nokeep": |
248 | proto = ud.parm.get('protocol', 'http') | 248 | proto = ud.parm.get('protocol', 'http') |
249 | if not os.access(os.path.join(codir, '.hg'), os.R_OK): | 249 | if not os.access(os.path.join(codir, '.hg'), os.R_OK): |
250 | logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'") | 250 | logger.debug2("Unpack: creating new hg repository in '" + codir + "'") |
251 | runfetchcmd("%s init %s" % (ud.basecmd, codir), d) | 251 | runfetchcmd("%s init %s" % (ud.basecmd, codir), d) |
252 | logger.debug(2, "Unpack: updating source in '" + codir + "'") | 252 | logger.debug2("Unpack: updating source in '" + codir + "'") |
253 | if ud.user and ud.pswd: | 253 | if ud.user and ud.pswd: |
254 | runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir) | 254 | runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir) |
255 | else: | 255 | else: |
@@ -259,5 +259,5 @@ class Hg(FetchMethod): | |||
259 | else: | 259 | else: |
260 | runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir) | 260 | runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir) |
261 | else: | 261 | else: |
262 | logger.debug(2, "Unpack: extracting source to '" + codir + "'") | 262 | logger.debug2("Unpack: extracting source to '" + codir + "'") |
263 | runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir) | 263 | runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir) |
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py index 25d4557db6..e7d1c8c58f 100644 --- a/bitbake/lib/bb/fetch2/local.py +++ b/bitbake/lib/bb/fetch2/local.py | |||
@@ -54,12 +54,12 @@ class Local(FetchMethod): | |||
54 | return [path] | 54 | return [path] |
55 | filespath = d.getVar('FILESPATH') | 55 | filespath = d.getVar('FILESPATH') |
56 | if filespath: | 56 | if filespath: |
57 | logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) | 57 | logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) |
58 | newpath, hist = bb.utils.which(filespath, path, history=True) | 58 | newpath, hist = bb.utils.which(filespath, path, history=True) |
59 | searched.extend(hist) | 59 | searched.extend(hist) |
60 | if not os.path.exists(newpath): | 60 | if not os.path.exists(newpath): |
61 | dldirfile = os.path.join(d.getVar("DL_DIR"), path) | 61 | dldirfile = os.path.join(d.getVar("DL_DIR"), path) |
62 | logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) | 62 | logger.debug2("Defaulting to %s for %s" % (dldirfile, path)) |
63 | bb.utils.mkdirhier(os.path.dirname(dldirfile)) | 63 | bb.utils.mkdirhier(os.path.dirname(dldirfile)) |
64 | searched.append(dldirfile) | 64 | searched.append(dldirfile) |
65 | return searched | 65 | return searched |
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py index 3a6cd29510..d9ce44390c 100644 --- a/bitbake/lib/bb/fetch2/osc.py +++ b/bitbake/lib/bb/fetch2/osc.py | |||
@@ -84,13 +84,13 @@ class Osc(FetchMethod): | |||
84 | Fetch url | 84 | Fetch url |
85 | """ | 85 | """ |
86 | 86 | ||
87 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 87 | logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") |
88 | 88 | ||
89 | if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): | 89 | if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): |
90 | oscupdatecmd = self._buildosccommand(ud, d, "update") | 90 | oscupdatecmd = self._buildosccommand(ud, d, "update") |
91 | logger.info("Update "+ ud.url) | 91 | logger.info("Update "+ ud.url) |
92 | # update sources there | 92 | # update sources there |
93 | logger.debug(1, "Running %s", oscupdatecmd) | 93 | logger.debug("Running %s", oscupdatecmd) |
94 | bb.fetch2.check_network_access(d, oscupdatecmd, ud.url) | 94 | bb.fetch2.check_network_access(d, oscupdatecmd, ud.url) |
95 | runfetchcmd(oscupdatecmd, d, workdir=ud.moddir) | 95 | runfetchcmd(oscupdatecmd, d, workdir=ud.moddir) |
96 | else: | 96 | else: |
@@ -98,7 +98,7 @@ class Osc(FetchMethod): | |||
98 | logger.info("Fetch " + ud.url) | 98 | logger.info("Fetch " + ud.url) |
99 | # check out sources there | 99 | # check out sources there |
100 | bb.utils.mkdirhier(ud.pkgdir) | 100 | bb.utils.mkdirhier(ud.pkgdir) |
101 | logger.debug(1, "Running %s", oscfetchcmd) | 101 | logger.debug("Running %s", oscfetchcmd) |
102 | bb.fetch2.check_network_access(d, oscfetchcmd, ud.url) | 102 | bb.fetch2.check_network_access(d, oscfetchcmd, ud.url) |
103 | runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir) | 103 | runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir) |
104 | 104 | ||
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py index da6d337461..e2a41a4a12 100644 --- a/bitbake/lib/bb/fetch2/perforce.py +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
@@ -90,16 +90,16 @@ class Perforce(FetchMethod): | |||
90 | p4port = d.getVar('P4PORT') | 90 | p4port = d.getVar('P4PORT') |
91 | 91 | ||
92 | if p4port: | 92 | if p4port: |
93 | logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port) | 93 | logger.debug('Using recipe provided P4PORT: %s' % p4port) |
94 | ud.host = p4port | 94 | ud.host = p4port |
95 | else: | 95 | else: |
96 | logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...') | 96 | logger.debug('Trying to use P4CONFIG to automatically set P4PORT...') |
97 | ud.usingp4config = True | 97 | ud.usingp4config = True |
98 | p4cmd = '%s info | grep "Server address"' % ud.basecmd | 98 | p4cmd = '%s info | grep "Server address"' % ud.basecmd |
99 | bb.fetch2.check_network_access(d, p4cmd, ud.url) | 99 | bb.fetch2.check_network_access(d, p4cmd, ud.url) |
100 | ud.host = runfetchcmd(p4cmd, d, True) | 100 | ud.host = runfetchcmd(p4cmd, d, True) |
101 | ud.host = ud.host.split(': ')[1].strip() | 101 | ud.host = ud.host.split(': ')[1].strip() |
102 | logger.debug(1, 'Determined P4PORT to be: %s' % ud.host) | 102 | logger.debug('Determined P4PORT to be: %s' % ud.host) |
103 | if not ud.host: | 103 | if not ud.host: |
104 | raise FetchError('Could not determine P4PORT from P4CONFIG') | 104 | raise FetchError('Could not determine P4PORT from P4CONFIG') |
105 | 105 | ||
@@ -208,7 +208,7 @@ class Perforce(FetchMethod): | |||
208 | for filename in p4fileslist: | 208 | for filename in p4fileslist: |
209 | item = filename.split(' - ') | 209 | item = filename.split(' - ') |
210 | lastaction = item[1].split() | 210 | lastaction = item[1].split() |
211 | logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0])) | 211 | logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0])) |
212 | if lastaction[0] == 'delete': | 212 | if lastaction[0] == 'delete': |
213 | continue | 213 | continue |
214 | filelist.append(item[0]) | 214 | filelist.append(item[0]) |
@@ -255,7 +255,7 @@ class Perforce(FetchMethod): | |||
255 | raise FetchError('Could not determine the latest perforce changelist') | 255 | raise FetchError('Could not determine the latest perforce changelist') |
256 | 256 | ||
257 | tipcset = tip.split(' ')[1] | 257 | tipcset = tip.split(' ')[1] |
258 | logger.debug(1, 'p4 tip found to be changelist %s' % tipcset) | 258 | logger.debug('p4 tip found to be changelist %s' % tipcset) |
259 | return tipcset | 259 | return tipcset |
260 | 260 | ||
261 | def sortable_revision(self, ud, d, name): | 261 | def sortable_revision(self, ud, d, name): |
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py index 2bdbbd4097..fa4cb8149b 100644 --- a/bitbake/lib/bb/fetch2/repo.py +++ b/bitbake/lib/bb/fetch2/repo.py | |||
@@ -47,7 +47,7 @@ class Repo(FetchMethod): | |||
47 | """Fetch url""" | 47 | """Fetch url""" |
48 | 48 | ||
49 | if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK): | 49 | if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK): |
50 | logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) | 50 | logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) |
51 | return | 51 | return |
52 | 52 | ||
53 | repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo") | 53 | repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo") |
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py index 971a5add4a..8856ef1c62 100644 --- a/bitbake/lib/bb/fetch2/svn.py +++ b/bitbake/lib/bb/fetch2/svn.py | |||
@@ -116,7 +116,7 @@ class Svn(FetchMethod): | |||
116 | def download(self, ud, d): | 116 | def download(self, ud, d): |
117 | """Fetch url""" | 117 | """Fetch url""" |
118 | 118 | ||
119 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 119 | logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'") |
120 | 120 | ||
121 | lf = bb.utils.lockfile(ud.svnlock) | 121 | lf = bb.utils.lockfile(ud.svnlock) |
122 | 122 | ||
@@ -129,7 +129,7 @@ class Svn(FetchMethod): | |||
129 | runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir) | 129 | runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir) |
130 | except FetchError: | 130 | except FetchError: |
131 | pass | 131 | pass |
132 | logger.debug(1, "Running %s", svncmd) | 132 | logger.debug("Running %s", svncmd) |
133 | bb.fetch2.check_network_access(d, svncmd, ud.url) | 133 | bb.fetch2.check_network_access(d, svncmd, ud.url) |
134 | runfetchcmd(svncmd, d, workdir=ud.moddir) | 134 | runfetchcmd(svncmd, d, workdir=ud.moddir) |
135 | else: | 135 | else: |
@@ -137,7 +137,7 @@ class Svn(FetchMethod): | |||
137 | logger.info("Fetch " + ud.url) | 137 | logger.info("Fetch " + ud.url) |
138 | # check out sources there | 138 | # check out sources there |
139 | bb.utils.mkdirhier(ud.pkgdir) | 139 | bb.utils.mkdirhier(ud.pkgdir) |
140 | logger.debug(1, "Running %s", svncmd) | 140 | logger.debug("Running %s", svncmd) |
141 | bb.fetch2.check_network_access(d, svncmd, ud.url) | 141 | bb.fetch2.check_network_access(d, svncmd, ud.url) |
142 | runfetchcmd(svncmd, d, workdir=ud.pkgdir) | 142 | runfetchcmd(svncmd, d, workdir=ud.pkgdir) |
143 | 143 | ||
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index e952f411c7..78a49676fe 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -88,7 +88,7 @@ class Wget(FetchMethod): | |||
88 | 88 | ||
89 | progresshandler = WgetProgressHandler(d) | 89 | progresshandler = WgetProgressHandler(d) |
90 | 90 | ||
91 | logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) | 91 | logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) |
92 | bb.fetch2.check_network_access(d, command, ud.url) | 92 | bb.fetch2.check_network_access(d, command, ud.url) |
93 | runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) | 93 | runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) |
94 | 94 | ||
@@ -326,11 +326,11 @@ class Wget(FetchMethod): | |||
326 | pass | 326 | pass |
327 | except urllib.error.URLError as e: | 327 | except urllib.error.URLError as e: |
328 | if try_again: | 328 | if try_again: |
329 | logger.debug(2, "checkstatus: trying again") | 329 | logger.debug2("checkstatus: trying again") |
330 | return self.checkstatus(fetch, ud, d, False) | 330 | return self.checkstatus(fetch, ud, d, False) |
331 | else: | 331 | else: |
332 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 332 | # debug for now to avoid spamming the logs in e.g. remote sstate searches |
333 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) | 333 | logger.debug2("checkstatus() urlopen failed: %s" % e) |
334 | return False | 334 | return False |
335 | return True | 335 | return True |
336 | 336 | ||