Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  38
 1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index ee3d7b1672..19169d780f 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_decoded = list(decodeurl(ud.url))
     uri_find_decoded = list(decodeurl(uri_find))
     uri_replace_decoded = list(decodeurl(uri_replace))
-    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     result = encodeurl(result_decoded)
     if result == ud.url:
         return None
-    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    logger.debug2("For url %s returning %s" % (ud.url, result))
     return result
 
 methods = []
@@ -499,9 +499,9 @@ def fetcher_init(d):
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
-        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
-        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
         revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     cmd = 'export PSEUDO_DISABLED=1; ' + cmd
 
     if workdir:
-        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+        logger.debug("Running '%s' in %s" % (cmd, workdir))
     else:
-        logger.debug(1, "Running %s", cmd)
+        logger.debug("Running %s", cmd)
 
     success = False
     error_message = ""
@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
     elif not trusted_network(d, url):
         raise UntrustedUrl(url, info)
     else:
-        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+        logger.debug("Fetcher accessed the network with the command %s" % info)
 
 def build_mirroruris(origud, mirrors, ld):
     uris = []
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
                     continue
 
                 if not trusted_network(ld, newuri):
-                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+                    logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                     continue
 
                 # Create a local copy of the mirrors minus the current line
@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld):
                     newud = FetchData(newuri, ld)
                     newud.setup_localpath(ld)
                 except bb.fetch2.BBFetchException as e:
-                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
-                    logger.debug(1, str(e))
+                    logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                    logger.debug(str(e))
                     try:
                         # setup_localpath of file:// urls may fail, we should still see
                         # if mirrors of the url exist
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         elif isinstance(e, NoChecksumError):
             raise
         else:
-            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
-            logger.debug(1, str(e))
+            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(str(e))
         try:
             ud.method.clean(ud, ld)
         except UnboundLocalError:
@@ -1688,7 +1688,7 @@ class Fetch(object):
                 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     done = True
                 elif m.try_premirror(ud, self.d):
-                    logger.debug(1, "Trying PREMIRRORS")
+                    logger.debug("Trying PREMIRRORS")
                     mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     done = m.try_mirrors(self, ud, self.d, mirrors)
                     if done:
@@ -1698,7 +1698,7 @@ class Fetch(object):
                             m.update_donestamp(ud, self.d)
                         except ChecksumError as e:
                             logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                             done = False
 
                 if premirroronly:
@@ -1710,7 +1710,7 @@ class Fetch(object):
                     try:
                         if not trusted_network(self.d, ud.url):
                             raise UntrustedUrl(ud.url)
-                        logger.debug(1, "Trying Upstream")
+                        logger.debug("Trying Upstream")
                         m.download(ud, self.d)
                         if hasattr(m, "build_mirror_data"):
                             m.build_mirror_data(ud, self.d)
@@ -1725,19 +1725,19 @@ class Fetch(object):
                     except BBFetchException as e:
                         if isinstance(e, ChecksumError):
                             logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                             if os.path.exists(ud.localpath):
                                 rename_bad_checksum(ud, e.checksum)
                         elif isinstance(e, NoChecksumError):
                             raise
                         else:
                             logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
-                            logger.debug(1, str(e))
+                            logger.debug(str(e))
                         firsterr = e
                         # Remove any incomplete fetch
                         if not verified_stamp:
                             m.clean(ud, self.d)
-                        logger.debug(1, "Trying MIRRORS")
+                        logger.debug("Trying MIRRORS")
                         mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         done = m.try_mirrors(self, ud, self.d, mirrors)
 
@@ -1774,7 +1774,7 @@ class Fetch(object):
             ud = self.ud[u]
             ud.setup_localpath(self.d)
             m = ud.method
-            logger.debug(1, "Testing URL %s", u)
+            logger.debug("Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
             mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = m.try_mirrors(self, ud, self.d, mirrors, True)
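
The only functional change in this diff is the move from bitbake's old numeric debug arguments (logger.debug(1, ...), logger.debug(2, ...)) to plain logger.debug() and logger.debug2() calls. As a rough illustration of how a debug2() convenience method can sit on top of Python's standard logging, here is a minimal sketch; the DEBUG2 numeric value, the FetchLogger class, and the logger name are assumptions for the example, not bitbake's actual implementation (bitbake wires up its own logger class in its core logging code).

import logging

# Hypothetical sketch: register a level one notch below DEBUG and expose a
# debug2() helper, similar in spirit to the calls this diff switches to.
DEBUG2 = logging.DEBUG - 1          # assumed value, not bitbake's definition
logging.addLevelName(DEBUG2, "DEBUG2")

class FetchLogger(logging.Logger):
    def debug2(self, msg, *args, **kwargs):
        # Extra-verbose debug messages go through the custom level so they
        # can be filtered independently of ordinary DEBUG output.
        if self.isEnabledFor(DEBUG2):
            self._log(DEBUG2, msg, args, **kwargs)

logging.setLoggerClass(FetchLogger)
logger = logging.getLogger("BitBake.Fetcher.Example")   # illustrative name

logger.debug("ordinary debug message")        # replaces logger.debug(1, ...)
logger.debug2("extra-verbose debug message")  # replaces logger.debug(2, ...)

With a scheme along these lines, both kinds of call sites in the diff keep their verbosity distinction while using the conventional logging call signature (message plus arguments) instead of a leading level number.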