author    Richard Purdie <richard.purdie@linuxfoundation.org>    2013-11-19 14:32:08 +0000
committer Richard Purdie <richard.purdie@linuxfoundation.org>    2013-11-22 12:03:02 +0000
commit    9d7f8e2a206f8266fd0766b6161dbd1bf6b787a7
tree      2002648fd17558c7d895d672aa65fa46f4b82250 /bitbake/lib/bb/fetch2/__init__.py
parent    4acc7322a2ada941b803e1aee23d84351888eef0
bitbake: fetch2: Stop passing around the pointless url parameter
There is no good reason to keep passing around the url parameter when it is already contained within urldata (ud). It has been left around for legacy reasons; some functions take it, some don't, and it is time to clean this up.

This is fetcher-internal API. There are a tiny number of external users of the internal API (buildhistory and distrodata) which can be fixed up after this change.

(Bitbake rev: 6a48474de9505a3700863f31839a7c53c5e18a8d)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
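To make the change concrete, here is a minimal, self-contained sketch of what the new-style fetcher API looks like from an implementer's point of view. The FetchData and ExampleFetchMethod classes below are simplified stand-ins invented for illustration (they are not the real bb.fetch2 classes, and the example:// scheme is made up): the point is that the FetchData object ("ud") already carries the URL as ud.url, so a separate url argument to supports(), download() and friends adds nothing.

# Illustrative stand-ins only; the real classes live in bb.fetch2.

class FetchData:
    """Stand-in for bb.fetch2.FetchData: one parsed SRC_URI entry."""
    def __init__(self, url):
        self.url = url            # the full URL already lives on the object
        self.localpath = None

class ExampleFetchMethod:
    """Stand-in for a FetchMethod subclass using the new-style signatures."""
    def supports(self, ud, d):
        # Old signature: supports(self, url, ud, d) -- 'url' duplicated ud.url
        return ud.url.startswith("example://")

    def download(self, ud, d):
        # Old signature: download(self, url, ud, d)
        print("fetching %s" % ud.url)
        ud.localpath = "/tmp/" + ud.url.rsplit("/", 1)[-1]

if __name__ == "__main__":
    ud = FetchData("example://host/path/file.tar.gz")
    m = ExampleFetchMethod()
    if m.supports(ud, None):      # 'd' (the datastore) is unused in this sketch
        m.download(ud, None)

The same shape applies to callers: code that previously called ud.method.download(url, ud, d) now calls ud.method.download(ud, d) and reads the URL from ud.url where it needs it.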
Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  64
1 file changed, 32 insertions(+), 32 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index ae99b722aa..199cdca9ba 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -619,7 +619,7 @@ def get_srcrev(d):
         raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
 
     if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
-        autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
+        autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
         if len(rev) > 10:
             rev = rev[:10]
         if autoinc:
@@ -637,7 +637,7 @@ def get_srcrev(d):
     for scm in scms:
         ud = urldata[scm]
         for name in ud.names:
-            autoinc, rev = ud.method.sortable_revision(scm, ud, d, name)
+            autoinc, rev = ud.method.sortable_revision(ud, d, name)
             seenautoinc = seenautoinc or autoinc
             if len(rev) > 10:
                 rev = rev[:10]
@@ -777,17 +777,17 @@ def try_mirror_url(origud, ud, ld, check = False):
     # False means try another url
     try:
         if check:
-            found = ud.method.checkstatus(ud.url, ud, ld)
+            found = ud.method.checkstatus(ud, ld)
             if found:
                 return found
             return False
 
         os.chdir(ld.getVar("DL_DIR", True))
 
-        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud.url, ud, ld):
-            ud.method.download(ud.url, ud, ld)
+        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
+            ud.method.download(ud, ld)
             if hasattr(ud.method,"build_mirror_data"):
-                ud.method.build_mirror_data(ud.url, ud, ld)
+                ud.method.build_mirror_data(ud, ld)
 
         if not ud.localpath or not os.path.exists(ud.localpath):
             return False
@@ -805,10 +805,10 @@ def try_mirror_url(origud, ud, ld, check = False):
             dest = os.path.join(dldir, os.path.basename(ud.localpath))
             if not os.path.exists(dest):
                 os.symlink(ud.localpath, dest)
-            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud.url, origud, ld):
-                origud.method.download(origud.url, origud, ld)
+            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
+                origud.method.download(origud, ld)
                 if hasattr(ud.method,"build_mirror_data"):
-                    origud.method.build_mirror_data(origud.url, origud, ld)
+                    origud.method.build_mirror_data(origud, ld)
             return None
         # Otherwise the result is a local file:// and we symlink to it
         if not os.path.exists(origud.localpath):
@@ -888,7 +888,7 @@ def srcrev_internal_helper(ud, d, name):
             var = "SRCREV_%s_pn-%s" % (name, pn)
         raise FetchError("Please set %s to a valid value" % var, ud.url)
     if rev == "AUTOINC":
-        rev = ud.method.latest_revision(ud.url, ud, d, name)
+        rev = ud.method.latest_revision(ud, d, name)
 
     return rev
 
@@ -1009,7 +1009,7 @@ class FetchData(object):
 
         self.method = None
         for m in methods:
-            if m.supports(url, self, d):
+            if m.supports(self, d):
                 self.method = m
                 break
 
@@ -1031,7 +1031,7 @@ class FetchData(object):
             self.localpath = self.parm["localpath"]
             self.basename = os.path.basename(self.localpath)
         elif self.localfile:
-            self.localpath = self.method.localpath(self.url, self, d)
+            self.localpath = self.method.localpath(self, d)
 
         dldir = d.getVar("DL_DIR", True)
         # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
@@ -1055,7 +1055,7 @@ class FetchData(object):
 
     def setup_localpath(self, d):
         if not self.localpath:
-            self.localpath = self.method.localpath(self.url, self, d)
+            self.localpath = self.method.localpath(self, d)
 
     def getSRCDate(self, d):
         """
@@ -1079,13 +1079,13 @@ class FetchMethod(object):
     def __init__(self, urls = []):
         self.urls = []
 
-    def supports(self, url, urldata, d):
+    def supports(self, urldata, d):
         """
         Check to see if this fetch class supports a given url.
         """
         return 0
 
-    def localpath(self, url, urldata, d):
+    def localpath(self, urldata, d):
         """
         Return the local filename of a given url assuming a successful fetch.
         Can also setup variables in urldata for use in go (saving code duplication
@@ -1129,7 +1129,7 @@ class FetchMethod(object):
 
     urls = property(getUrls, setUrls, None, "Urls property")
 
-    def need_update(self, url, ud, d):
+    def need_update(self, ud, d):
         """
         Force a fetch, even if localpath exists?
         """
@@ -1143,7 +1143,7 @@ class FetchMethod(object):
         """
         return False
 
-    def download(self, url, urldata, d):
+    def download(self, urldata, d):
         """
         Fetch urls
         Assumes localpath was called first
@@ -1267,13 +1267,13 @@ class FetchMethod(object):
         """
         bb.utils.remove(urldata.localpath)
 
-    def try_premirror(self, url, urldata, d):
+    def try_premirror(self, urldata, d):
         """
         Should premirrors be used?
         """
         return True
 
-    def checkstatus(self, url, urldata, d):
+    def checkstatus(self, urldata, d):
         """
         Check the status of a URL
         Assumes localpath was called first
@@ -1281,7 +1281,7 @@ class FetchMethod(object):
         logger.info("URL %s could not be checked for status since no method exists.", url)
         return True
 
-    def latest_revision(self, url, ud, d, name):
+    def latest_revision(self, ud, d, name):
         """
         Look in the cache for the latest revision, if not present ask the SCM.
         """
@@ -1289,19 +1289,19 @@ class FetchMethod(object):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
 
         revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
-        key = self.generate_revision_key(url, ud, d, name)
+        key = self.generate_revision_key(ud, d, name)
         try:
             return revs[key]
         except KeyError:
-            revs[key] = rev = self._latest_revision(url, ud, d, name)
+            revs[key] = rev = self._latest_revision(ud, d, name)
             return rev
 
-    def sortable_revision(self, url, ud, d, name):
-        latest_rev = self._build_revision(url, ud, d, name)
+    def sortable_revision(self, ud, d, name):
+        latest_rev = self._build_revision(ud, d, name)
         return True, str(latest_rev)
 
-    def generate_revision_key(self, url, ud, d, name):
-        key = self._revision_key(url, ud, d, name)
+    def generate_revision_key(self, ud, d, name):
+        key = self._revision_key(ud, d, name)
         return "%s-%s" % (key, d.getVar("PN", True) or "")
 
 class Fetch(object):
@@ -1372,9 +1372,9 @@ class Fetch(object):
             try:
                 self.d.setVar("BB_NO_NETWORK", network)
 
-                if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
+                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                     localpath = ud.localpath
-                elif m.try_premirror(u, ud, self.d):
+                elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
                     mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                     localpath = try_mirrors(self.d, ud, mirrors, False)
@@ -1385,12 +1385,12 @@ class Fetch(object):
                 os.chdir(self.d.getVar("DL_DIR", True))
 
                 firsterr = None
-                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
+                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                     try:
                         logger.debug(1, "Trying Upstream")
-                        m.download(u, ud, self.d)
+                        m.download(ud, self.d)
                         if hasattr(m, "build_mirror_data"):
-                            m.build_mirror_data(u, ud, self.d)
+                            m.build_mirror_data(ud, self.d)
                         localpath = ud.localpath
                         # early checksum verify, so that if checksum mismatched,
                         # fetcher still have chance to fetch from mirror
@@ -1452,7 +1452,7 @@ class Fetch(object):
             if not ret:
                 # Next try checking from the original uri, u
                 try:
-                    ret = m.checkstatus(u, ud, self.d)
+                    ret = m.checkstatus(ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
                     mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))