Diffstat (limited to 'bitbake')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 64
-rw-r--r-- | bitbake/lib/bb/fetch2/bzr.py | 20
-rw-r--r-- | bitbake/lib/bb/fetch2/cvs.py | 10
-rw-r--r-- | bitbake/lib/bb/fetch2/git.py | 24
-rw-r--r-- | bitbake/lib/bb/fetch2/gitsm.py | 10
-rw-r--r-- | bitbake/lib/bb/fetch2/hg.py | 16
-rw-r--r-- | bitbake/lib/bb/fetch2/local.py | 16
-rw-r--r-- | bitbake/lib/bb/fetch2/osc.py | 8
-rw-r--r-- | bitbake/lib/bb/fetch2/perforce.py | 14
-rw-r--r-- | bitbake/lib/bb/fetch2/repo.py | 8
-rw-r--r-- | bitbake/lib/bb/fetch2/sftp.py | 8
-rw-r--r-- | bitbake/lib/bb/fetch2/ssh.py | 8
-rw-r--r-- | bitbake/lib/bb/fetch2/svk.py | 10
-rw-r--r-- | bitbake/lib/bb/fetch2/svn.py | 18
-rw-r--r-- | bitbake/lib/bb/fetch2/wget.py | 10
15 files changed, 122 insertions, 122 deletions
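Every hunk below makes the same mechanical change: the redundant url/loc argument is dropped from the FetchMethod callbacks (supports, localpath, need_update, download, build_mirror_data, try_premirror, checkstatus, latest_revision, sortable_revision and the _revision_key/_latest_revision/_build_revision helpers), because the FetchData object passed alongside it already carries the URL as ud.url. As a rough standalone sketch of the before/after shape — the UrlData and ExampleFetcher classes here are illustrative stand-ins, not bitbake code:

# Illustrative stand-ins only -- not the real bb.fetch2 classes.

class UrlData:
    """Minimal stand-in for bb.fetch2.FetchData: the URL travels with the object."""
    def __init__(self, url):
        self.url = url
        self.type = url.split("://", 1)[0]


class ExampleFetcher:
    # Old signature (before this patch):  def download(self, url, ud, d): ...
    # New signature (after this patch):   def download(self, ud, d): ...
    # The URL is read from ud.url whenever it is needed.
    def supports(self, ud, d):
        return ud.type in ["example"]

    def download(self, ud, d):
        print("Fetch " + ud.url)  # mirrors logger.info("Fetch " + ud.url) in the fetchers
        return True


if __name__ == "__main__":
    ud = UrlData("example://host/path")
    fetcher = ExampleFetcher()
    if fetcher.supports(ud, d=None):
        fetcher.download(ud, d=None)

Callers change accordingly, e.g. m.download(u, ud, self.d) becomes m.download(ud, self.d), as the __init__.py hunks below show.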
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index ae99b722aa..199cdca9ba 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -619,7 +619,7 @@ def get_srcrev(d):
619 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") | 619 | raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") |
620 | 620 | ||
621 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: | 621 | if len(scms) == 1 and len(urldata[scms[0]].names) == 1: |
622 | autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0]) | 622 | autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0]) |
623 | if len(rev) > 10: | 623 | if len(rev) > 10: |
624 | rev = rev[:10] | 624 | rev = rev[:10] |
625 | if autoinc: | 625 | if autoinc: |
@@ -637,7 +637,7 @@ def get_srcrev(d):
637 | for scm in scms: | 637 | for scm in scms: |
638 | ud = urldata[scm] | 638 | ud = urldata[scm] |
639 | for name in ud.names: | 639 | for name in ud.names: |
640 | autoinc, rev = ud.method.sortable_revision(scm, ud, d, name) | 640 | autoinc, rev = ud.method.sortable_revision(ud, d, name) |
641 | seenautoinc = seenautoinc or autoinc | 641 | seenautoinc = seenautoinc or autoinc |
642 | if len(rev) > 10: | 642 | if len(rev) > 10: |
643 | rev = rev[:10] | 643 | rev = rev[:10] |
@@ -777,17 +777,17 @@ def try_mirror_url(origud, ud, ld, check = False):
777 | # False means try another url | 777 | # False means try another url |
778 | try: | 778 | try: |
779 | if check: | 779 | if check: |
780 | found = ud.method.checkstatus(ud.url, ud, ld) | 780 | found = ud.method.checkstatus(ud, ld) |
781 | if found: | 781 | if found: |
782 | return found | 782 | return found |
783 | return False | 783 | return False |
784 | 784 | ||
785 | os.chdir(ld.getVar("DL_DIR", True)) | 785 | os.chdir(ld.getVar("DL_DIR", True)) |
786 | 786 | ||
787 | if not os.path.exists(ud.donestamp) or ud.method.need_update(ud.url, ud, ld): | 787 | if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld): |
788 | ud.method.download(ud.url, ud, ld) | 788 | ud.method.download(ud, ld) |
789 | if hasattr(ud.method,"build_mirror_data"): | 789 | if hasattr(ud.method,"build_mirror_data"): |
790 | ud.method.build_mirror_data(ud.url, ud, ld) | 790 | ud.method.build_mirror_data(ud, ld) |
791 | 791 | ||
792 | if not ud.localpath or not os.path.exists(ud.localpath): | 792 | if not ud.localpath or not os.path.exists(ud.localpath): |
793 | return False | 793 | return False |
@@ -805,10 +805,10 @@ def try_mirror_url(origud, ud, ld, check = False):
805 | dest = os.path.join(dldir, os.path.basename(ud.localpath)) | 805 | dest = os.path.join(dldir, os.path.basename(ud.localpath)) |
806 | if not os.path.exists(dest): | 806 | if not os.path.exists(dest): |
807 | os.symlink(ud.localpath, dest) | 807 | os.symlink(ud.localpath, dest) |
808 | if not os.path.exists(origud.donestamp) or origud.method.need_update(origud.url, origud, ld): | 808 | if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld): |
809 | origud.method.download(origud.url, origud, ld) | 809 | origud.method.download(origud, ld) |
810 | if hasattr(ud.method,"build_mirror_data"): | 810 | if hasattr(ud.method,"build_mirror_data"): |
811 | origud.method.build_mirror_data(origud.url, origud, ld) | 811 | origud.method.build_mirror_data(origud, ld) |
812 | return None | 812 | return None |
813 | # Otherwise the result is a local file:// and we symlink to it | 813 | # Otherwise the result is a local file:// and we symlink to it |
814 | if not os.path.exists(origud.localpath): | 814 | if not os.path.exists(origud.localpath): |
@@ -888,7 +888,7 @@ def srcrev_internal_helper(ud, d, name):
888 | var = "SRCREV_%s_pn-%s" % (name, pn) | 888 | var = "SRCREV_%s_pn-%s" % (name, pn) |
889 | raise FetchError("Please set %s to a valid value" % var, ud.url) | 889 | raise FetchError("Please set %s to a valid value" % var, ud.url) |
890 | if rev == "AUTOINC": | 890 | if rev == "AUTOINC": |
891 | rev = ud.method.latest_revision(ud.url, ud, d, name) | 891 | rev = ud.method.latest_revision(ud, d, name) |
892 | 892 | ||
893 | return rev | 893 | return rev |
894 | 894 | ||
@@ -1009,7 +1009,7 @@ class FetchData(object):
1009 | 1009 | ||
1010 | self.method = None | 1010 | self.method = None |
1011 | for m in methods: | 1011 | for m in methods: |
1012 | if m.supports(url, self, d): | 1012 | if m.supports(self, d): |
1013 | self.method = m | 1013 | self.method = m |
1014 | break | 1014 | break |
1015 | 1015 | ||
@@ -1031,7 +1031,7 @@ class FetchData(object):
1031 | self.localpath = self.parm["localpath"] | 1031 | self.localpath = self.parm["localpath"] |
1032 | self.basename = os.path.basename(self.localpath) | 1032 | self.basename = os.path.basename(self.localpath) |
1033 | elif self.localfile: | 1033 | elif self.localfile: |
1034 | self.localpath = self.method.localpath(self.url, self, d) | 1034 | self.localpath = self.method.localpath(self, d) |
1035 | 1035 | ||
1036 | dldir = d.getVar("DL_DIR", True) | 1036 | dldir = d.getVar("DL_DIR", True) |
1037 | # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. | 1037 | # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. |
@@ -1055,7 +1055,7 @@ class FetchData(object):
1055 | 1055 | ||
1056 | def setup_localpath(self, d): | 1056 | def setup_localpath(self, d): |
1057 | if not self.localpath: | 1057 | if not self.localpath: |
1058 | self.localpath = self.method.localpath(self.url, self, d) | 1058 | self.localpath = self.method.localpath(self, d) |
1059 | 1059 | ||
1060 | def getSRCDate(self, d): | 1060 | def getSRCDate(self, d): |
1061 | """ | 1061 | """ |
@@ -1079,13 +1079,13 @@ class FetchMethod(object):
1079 | def __init__(self, urls = []): | 1079 | def __init__(self, urls = []): |
1080 | self.urls = [] | 1080 | self.urls = [] |
1081 | 1081 | ||
1082 | def supports(self, url, urldata, d): | 1082 | def supports(self, urldata, d): |
1083 | """ | 1083 | """ |
1084 | Check to see if this fetch class supports a given url. | 1084 | Check to see if this fetch class supports a given url. |
1085 | """ | 1085 | """ |
1086 | return 0 | 1086 | return 0 |
1087 | 1087 | ||
1088 | def localpath(self, url, urldata, d): | 1088 | def localpath(self, urldata, d): |
1089 | """ | 1089 | """ |
1090 | Return the local filename of a given url assuming a successful fetch. | 1090 | Return the local filename of a given url assuming a successful fetch. |
1091 | Can also setup variables in urldata for use in go (saving code duplication | 1091 | Can also setup variables in urldata for use in go (saving code duplication |
@@ -1129,7 +1129,7 @@ class FetchMethod(object):
1129 | 1129 | ||
1130 | urls = property(getUrls, setUrls, None, "Urls property") | 1130 | urls = property(getUrls, setUrls, None, "Urls property") |
1131 | 1131 | ||
1132 | def need_update(self, url, ud, d): | 1132 | def need_update(self, ud, d): |
1133 | """ | 1133 | """ |
1134 | Force a fetch, even if localpath exists? | 1134 | Force a fetch, even if localpath exists? |
1135 | """ | 1135 | """ |
@@ -1143,7 +1143,7 @@ class FetchMethod(object):
1143 | """ | 1143 | """ |
1144 | return False | 1144 | return False |
1145 | 1145 | ||
1146 | def download(self, url, urldata, d): | 1146 | def download(self, urldata, d): |
1147 | """ | 1147 | """ |
1148 | Fetch urls | 1148 | Fetch urls |
1149 | Assumes localpath was called first | 1149 | Assumes localpath was called first |
@@ -1267,13 +1267,13 @@ class FetchMethod(object):
1267 | """ | 1267 | """ |
1268 | bb.utils.remove(urldata.localpath) | 1268 | bb.utils.remove(urldata.localpath) |
1269 | 1269 | ||
1270 | def try_premirror(self, url, urldata, d): | 1270 | def try_premirror(self, urldata, d): |
1271 | """ | 1271 | """ |
1272 | Should premirrors be used? | 1272 | Should premirrors be used? |
1273 | """ | 1273 | """ |
1274 | return True | 1274 | return True |
1275 | 1275 | ||
1276 | def checkstatus(self, url, urldata, d): | 1276 | def checkstatus(self, urldata, d): |
1277 | """ | 1277 | """ |
1278 | Check the status of a URL | 1278 | Check the status of a URL |
1279 | Assumes localpath was called first | 1279 | Assumes localpath was called first |
@@ -1281,7 +1281,7 @@ class FetchMethod(object):
1281 | logger.info("URL %s could not be checked for status since no method exists.", url) | 1281 | logger.info("URL %s could not be checked for status since no method exists.", url) |
1282 | return True | 1282 | return True |
1283 | 1283 | ||
1284 | def latest_revision(self, url, ud, d, name): | 1284 | def latest_revision(self, ud, d, name): |
1285 | """ | 1285 | """ |
1286 | Look in the cache for the latest revision, if not present ask the SCM. | 1286 | Look in the cache for the latest revision, if not present ask the SCM. |
1287 | """ | 1287 | """ |
@@ -1289,19 +1289,19 @@ class FetchMethod(object):
1289 | raise ParameterError("The fetcher for this URL does not support _latest_revision", url) | 1289 | raise ParameterError("The fetcher for this URL does not support _latest_revision", url) |
1290 | 1290 | ||
1291 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) | 1291 | revs = bb.persist_data.persist('BB_URI_HEADREVS', d) |
1292 | key = self.generate_revision_key(url, ud, d, name) | 1292 | key = self.generate_revision_key(ud, d, name) |
1293 | try: | 1293 | try: |
1294 | return revs[key] | 1294 | return revs[key] |
1295 | except KeyError: | 1295 | except KeyError: |
1296 | revs[key] = rev = self._latest_revision(url, ud, d, name) | 1296 | revs[key] = rev = self._latest_revision(ud, d, name) |
1297 | return rev | 1297 | return rev |
1298 | 1298 | ||
1299 | def sortable_revision(self, url, ud, d, name): | 1299 | def sortable_revision(self, ud, d, name): |
1300 | latest_rev = self._build_revision(url, ud, d, name) | 1300 | latest_rev = self._build_revision(ud, d, name) |
1301 | return True, str(latest_rev) | 1301 | return True, str(latest_rev) |
1302 | 1302 | ||
1303 | def generate_revision_key(self, url, ud, d, name): | 1303 | def generate_revision_key(self, ud, d, name): |
1304 | key = self._revision_key(url, ud, d, name) | 1304 | key = self._revision_key(ud, d, name) |
1305 | return "%s-%s" % (key, d.getVar("PN", True) or "") | 1305 | return "%s-%s" % (key, d.getVar("PN", True) or "") |
1306 | 1306 | ||
1307 | class Fetch(object): | 1307 | class Fetch(object): |
@@ -1372,9 +1372,9 @@ class Fetch(object):
1372 | try: | 1372 | try: |
1373 | self.d.setVar("BB_NO_NETWORK", network) | 1373 | self.d.setVar("BB_NO_NETWORK", network) |
1374 | 1374 | ||
1375 | if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d): | 1375 | if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d): |
1376 | localpath = ud.localpath | 1376 | localpath = ud.localpath |
1377 | elif m.try_premirror(u, ud, self.d): | 1377 | elif m.try_premirror(ud, self.d): |
1378 | logger.debug(1, "Trying PREMIRRORS") | 1378 | logger.debug(1, "Trying PREMIRRORS") |
1379 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) | 1379 | mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) |
1380 | localpath = try_mirrors(self.d, ud, mirrors, False) | 1380 | localpath = try_mirrors(self.d, ud, mirrors, False) |
@@ -1385,12 +1385,12 @@ class Fetch(object):
1385 | os.chdir(self.d.getVar("DL_DIR", True)) | 1385 | os.chdir(self.d.getVar("DL_DIR", True)) |
1386 | 1386 | ||
1387 | firsterr = None | 1387 | firsterr = None |
1388 | if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)): | 1388 | if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)): |
1389 | try: | 1389 | try: |
1390 | logger.debug(1, "Trying Upstream") | 1390 | logger.debug(1, "Trying Upstream") |
1391 | m.download(u, ud, self.d) | 1391 | m.download(ud, self.d) |
1392 | if hasattr(m, "build_mirror_data"): | 1392 | if hasattr(m, "build_mirror_data"): |
1393 | m.build_mirror_data(u, ud, self.d) | 1393 | m.build_mirror_data(ud, self.d) |
1394 | localpath = ud.localpath | 1394 | localpath = ud.localpath |
1395 | # early checksum verify, so that if checksum mismatched, | 1395 | # early checksum verify, so that if checksum mismatched, |
1396 | # fetcher still have chance to fetch from mirror | 1396 | # fetcher still have chance to fetch from mirror |
@@ -1452,7 +1452,7 @@ class Fetch(object):
1452 | if not ret: | 1452 | if not ret: |
1453 | # Next try checking from the original uri, u | 1453 | # Next try checking from the original uri, u |
1454 | try: | 1454 | try: |
1455 | ret = m.checkstatus(u, ud, self.d) | 1455 | ret = m.checkstatus(ud, self.d) |
1456 | except: | 1456 | except: |
1457 | # Finally, try checking uri, u, from MIRRORS | 1457 | # Finally, try checking uri, u, from MIRRORS |
1458 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) | 1458 | mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) |
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
index 5d9e5f907c..3caaa6fca9 100644
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -34,7 +34,7 @@ from bb.fetch2 import runfetchcmd
34 | from bb.fetch2 import logger | 34 | from bb.fetch2 import logger |
35 | 35 | ||
36 | class Bzr(FetchMethod): | 36 | class Bzr(FetchMethod): |
37 | def supports(self, url, ud, d): | 37 | def supports(self, ud, d): |
38 | return ud.type in ['bzr'] | 38 | return ud.type in ['bzr'] |
39 | 39 | ||
40 | def urldata_init(self, ud, d): | 40 | def urldata_init(self, ud, d): |
@@ -81,12 +81,12 @@ class Bzr(FetchMethod):
81 | 81 | ||
82 | return bzrcmd | 82 | return bzrcmd |
83 | 83 | ||
84 | def download(self, loc, ud, d): | 84 | def download(self, ud, d): |
85 | """Fetch url""" | 85 | """Fetch url""" |
86 | 86 | ||
87 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): | 87 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): |
88 | bzrcmd = self._buildbzrcommand(ud, d, "update") | 88 | bzrcmd = self._buildbzrcommand(ud, d, "update") |
89 | logger.debug(1, "BZR Update %s", loc) | 89 | logger.debug(1, "BZR Update %s", ud.url) |
90 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) | 90 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) |
91 | os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) | 91 | os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) |
92 | runfetchcmd(bzrcmd, d) | 92 | runfetchcmd(bzrcmd, d) |
@@ -94,7 +94,7 @@ class Bzr(FetchMethod):
94 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) | 94 | bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) |
95 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") | 95 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") |
96 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) | 96 | bb.fetch2.check_network_access(d, bzrcmd, ud.url) |
97 | logger.debug(1, "BZR Checkout %s", loc) | 97 | logger.debug(1, "BZR Checkout %s", ud.url) |
98 | bb.utils.mkdirhier(ud.pkgdir) | 98 | bb.utils.mkdirhier(ud.pkgdir) |
99 | os.chdir(ud.pkgdir) | 99 | os.chdir(ud.pkgdir) |
100 | logger.debug(1, "Running %s", bzrcmd) | 100 | logger.debug(1, "Running %s", bzrcmd) |
@@ -114,17 +114,17 @@ class Bzr(FetchMethod):
114 | def supports_srcrev(self): | 114 | def supports_srcrev(self): |
115 | return True | 115 | return True |
116 | 116 | ||
117 | def _revision_key(self, url, ud, d, name): | 117 | def _revision_key(self, ud, d, name): |
118 | """ | 118 | """ |
119 | Return a unique key for the url | 119 | Return a unique key for the url |
120 | """ | 120 | """ |
121 | return "bzr:" + ud.pkgdir | 121 | return "bzr:" + ud.pkgdir |
122 | 122 | ||
123 | def _latest_revision(self, url, ud, d, name): | 123 | def _latest_revision(self, ud, d, name): |
124 | """ | 124 | """ |
125 | Return the latest upstream revision number | 125 | Return the latest upstream revision number |
126 | """ | 126 | """ |
127 | logger.debug(2, "BZR fetcher hitting network for %s", url) | 127 | logger.debug(2, "BZR fetcher hitting network for %s", ud.url) |
128 | 128 | ||
129 | bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) | 129 | bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) |
130 | 130 | ||
@@ -132,12 +132,12 @@ class Bzr(FetchMethod):
132 | 132 | ||
133 | return output.strip() | 133 | return output.strip() |
134 | 134 | ||
135 | def sortable_revision(self, url, ud, d, name): | 135 | def sortable_revision(self, ud, d, name): |
136 | """ | 136 | """ |
137 | Return a sortable revision number which in our case is the revision number | 137 | Return a sortable revision number which in our case is the revision number |
138 | """ | 138 | """ |
139 | 139 | ||
140 | return False, self._build_revision(url, ud, d) | 140 | return False, self._build_revision(ud, d) |
141 | 141 | ||
142 | def _build_revision(self, url, ud, d): | 142 | def _build_revision(self, ud, d): |
143 | return ud.revision | 143 | return ud.revision |
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
index 0a672a33ef..d27d96f68c 100644
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -36,7 +36,7 @@ class Cvs(FetchMethod):
36 | """ | 36 | """ |
37 | Class to fetch a module or modules from cvs repositories | 37 | Class to fetch a module or modules from cvs repositories |
38 | """ | 38 | """ |
39 | def supports(self, url, ud, d): | 39 | def supports(self, ud, d): |
40 | """ | 40 | """ |
41 | Check to see if a given url can be fetched with cvs. | 41 | Check to see if a given url can be fetched with cvs. |
42 | """ | 42 | """ |
@@ -65,14 +65,14 @@ class Cvs(FetchMethod):
65 | 65 | ||
66 | ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) | 66 | ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) |
67 | 67 | ||
68 | def need_update(self, url, ud, d): | 68 | def need_update(self, ud, d): |
69 | if (ud.date == "now"): | 69 | if (ud.date == "now"): |
70 | return True | 70 | return True |
71 | if not os.path.exists(ud.localpath): | 71 | if not os.path.exists(ud.localpath): |
72 | return True | 72 | return True |
73 | return False | 73 | return False |
74 | 74 | ||
75 | def download(self, loc, ud, d): | 75 | def download(self, ud, d): |
76 | 76 | ||
77 | method = ud.parm.get('method', 'pserver') | 77 | method = ud.parm.get('method', 'pserver') |
78 | localdir = ud.parm.get('localdir', ud.module) | 78 | localdir = ud.parm.get('localdir', ud.module) |
@@ -124,13 +124,13 @@ class Cvs(FetchMethod):
124 | pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) | 124 | pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) |
125 | moddir = os.path.join(pkgdir, localdir) | 125 | moddir = os.path.join(pkgdir, localdir) |
126 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): | 126 | if os.access(os.path.join(moddir, 'CVS'), os.R_OK): |
127 | logger.info("Update " + loc) | 127 | logger.info("Update " + ud.url) |
128 | bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) | 128 | bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) |
129 | # update sources there | 129 | # update sources there |
130 | os.chdir(moddir) | 130 | os.chdir(moddir) |
131 | cmd = cvsupdatecmd | 131 | cmd = cvsupdatecmd |
132 | else: | 132 | else: |
133 | logger.info("Fetch " + loc) | 133 | logger.info("Fetch " + ud.url) |
134 | # check out sources there | 134 | # check out sources there |
135 | bb.utils.mkdirhier(pkgdir) | 135 | bb.utils.mkdirhier(pkgdir) |
136 | os.chdir(pkgdir) | 136 | os.chdir(pkgdir) |
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index a901a44f03..b2366317c8 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -73,7 +73,7 @@ class Git(FetchMethod):
73 | def init(self, d): | 73 | def init(self, d): |
74 | pass | 74 | pass |
75 | 75 | ||
76 | def supports(self, url, ud, d): | 76 | def supports(self, ud, d): |
77 | """ | 77 | """ |
78 | Check to see if a given url can be fetched with git. | 78 | Check to see if a given url can be fetched with git. |
79 | """ | 79 | """ |
@@ -142,10 +142,10 @@ class Git(FetchMethod):
142 | 142 | ||
143 | ud.localfile = ud.clonedir | 143 | ud.localfile = ud.clonedir |
144 | 144 | ||
145 | def localpath(self, url, ud, d): | 145 | def localpath(self, ud, d): |
146 | return ud.clonedir | 146 | return ud.clonedir |
147 | 147 | ||
148 | def need_update(self, u, ud, d): | 148 | def need_update(self, ud, d): |
149 | if not os.path.exists(ud.clonedir): | 149 | if not os.path.exists(ud.clonedir): |
150 | return True | 150 | return True |
151 | os.chdir(ud.clonedir) | 151 | os.chdir(ud.clonedir) |
@@ -156,7 +156,7 @@ class Git(FetchMethod):
156 | return True | 156 | return True |
157 | return False | 157 | return False |
158 | 158 | ||
159 | def try_premirror(self, u, ud, d): | 159 | def try_premirror(self, ud, d): |
160 | # If we don't do this, updating an existing checkout with only premirrors | 160 | # If we don't do this, updating an existing checkout with only premirrors |
161 | # is not possible | 161 | # is not possible |
162 | if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: | 162 | if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: |
@@ -165,7 +165,7 @@ class Git(FetchMethod):
165 | return False | 165 | return False |
166 | return True | 166 | return True |
167 | 167 | ||
168 | def download(self, loc, ud, d): | 168 | def download(self, ud, d): |
169 | """Fetch url""" | 169 | """Fetch url""" |
170 | 170 | ||
171 | if ud.user: | 171 | if ud.user: |
@@ -214,7 +214,7 @@ class Git(FetchMethod):
214 | runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) | 214 | runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) |
215 | ud.repochanged = True | 215 | ud.repochanged = True |
216 | 216 | ||
217 | def build_mirror_data(self, url, ud, d): | 217 | def build_mirror_data(self, ud, d): |
218 | # Generate a mirror tarball if needed | 218 | # Generate a mirror tarball if needed |
219 | if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)): | 219 | if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)): |
220 | # it's possible that this symlink points to read-only filesystem with PREMIRROR | 220 | # it's possible that this symlink points to read-only filesystem with PREMIRROR |
@@ -292,13 +292,13 @@ class Git(FetchMethod):
292 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) | 292 | raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) |
293 | return output.split()[0] != "0" | 293 | return output.split()[0] != "0" |
294 | 294 | ||
295 | def _revision_key(self, url, ud, d, name): | 295 | def _revision_key(self, ud, d, name): |
296 | """ | 296 | """ |
297 | Return a unique key for the url | 297 | Return a unique key for the url |
298 | """ | 298 | """ |
299 | return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name] | 299 | return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name] |
300 | 300 | ||
301 | def _latest_revision(self, url, ud, d, name): | 301 | def _latest_revision(self, ud, d, name): |
302 | """ | 302 | """ |
303 | Compute the HEAD revision for the url | 303 | Compute the HEAD revision for the url |
304 | """ | 304 | """ |
@@ -314,14 +314,14 @@ class Git(FetchMethod):
314 | bb.fetch2.check_network_access(d, cmd) | 314 | bb.fetch2.check_network_access(d, cmd) |
315 | output = runfetchcmd(cmd, d, True) | 315 | output = runfetchcmd(cmd, d, True) |
316 | if not output: | 316 | if not output: |
317 | raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url) | 317 | raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url) |
318 | return output.split()[0] | 318 | return output.split()[0] |
319 | 319 | ||
320 | def _build_revision(self, url, ud, d, name): | 320 | def _build_revision(self, ud, d, name): |
321 | return ud.revisions[name] | 321 | return ud.revisions[name] |
322 | 322 | ||
323 | def checkstatus(self, uri, ud, d): | 323 | def checkstatus(self, ud, d): |
324 | fetchcmd = "%s ls-remote %s" % (ud.basecmd, uri) | 324 | fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url) |
325 | try: | 325 | try: |
326 | runfetchcmd(fetchcmd, d, quiet=True) | 326 | runfetchcmd(fetchcmd, d, quiet=True) |
327 | return True | 327 | return True |
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index 572b637c9a..4093f6e846 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -27,7 +27,7 @@ from bb.fetch2 import runfetchcmd
27 | from bb.fetch2 import logger | 27 | from bb.fetch2 import logger |
28 | 28 | ||
29 | class GitSM(Git): | 29 | class GitSM(Git): |
30 | def supports(self, url, ud, d): | 30 | def supports(self, ud, d): |
31 | """ | 31 | """ |
32 | Check to see if a given url can be fetched with git. | 32 | Check to see if a given url can be fetched with git. |
33 | """ | 33 | """ |
@@ -42,7 +42,7 @@ class GitSM(Git):
42 | pass | 42 | pass |
43 | return False | 43 | return False |
44 | 44 | ||
45 | def update_submodules(self, u, ud, d): | 45 | def update_submodules(self, ud, d): |
46 | # We have to convert bare -> full repo, do the submodule bit, then convert back | 46 | # We have to convert bare -> full repo, do the submodule bit, then convert back |
47 | tmpclonedir = ud.clonedir + ".tmp" | 47 | tmpclonedir = ud.clonedir + ".tmp" |
48 | gitdir = tmpclonedir + os.sep + ".git" | 48 | gitdir = tmpclonedir + os.sep + ".git" |
@@ -58,13 +58,13 @@ class GitSM(Git):
58 | os.rename(gitdir, ud.clonedir,) | 58 | os.rename(gitdir, ud.clonedir,) |
59 | bb.utils.remove(tmpclonedir, True) | 59 | bb.utils.remove(tmpclonedir, True) |
60 | 60 | ||
61 | def download(self, loc, ud, d): | 61 | def download(self, ud, d): |
62 | Git.download(self, loc, ud, d) | 62 | Git.download(self, ud, d) |
63 | 63 | ||
64 | os.chdir(ud.clonedir) | 64 | os.chdir(ud.clonedir) |
65 | submodules = self.uses_submodules(ud, d) | 65 | submodules = self.uses_submodules(ud, d) |
66 | if submodules: | 66 | if submodules: |
67 | self.update_submodules(loc, ud, d) | 67 | self.update_submodules(ud, d) |
68 | 68 | ||
69 | def unpack(self, ud, destdir, d): | 69 | def unpack(self, ud, destdir, d): |
70 | Git.unpack(self, ud, destdir, d) | 70 | Git.unpack(self, ud, destdir, d) |
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index cf214816b7..76810abb7d 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -37,7 +37,7 @@ from bb.fetch2 import logger
37 | 37 | ||
38 | class Hg(FetchMethod): | 38 | class Hg(FetchMethod): |
39 | """Class to fetch from mercurial repositories""" | 39 | """Class to fetch from mercurial repositories""" |
40 | def supports(self, url, ud, d): | 40 | def supports(self, ud, d): |
41 | """ | 41 | """ |
42 | Check to see if a given url can be fetched with mercurial. | 42 | Check to see if a given url can be fetched with mercurial. |
43 | """ | 43 | """ |
@@ -66,7 +66,7 @@ class Hg(FetchMethod):
66 | 66 | ||
67 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) | 67 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) |
68 | 68 | ||
69 | def need_update(self, url, ud, d): | 69 | def need_update(self, ud, d): |
70 | revTag = ud.parm.get('rev', 'tip') | 70 | revTag = ud.parm.get('rev', 'tip') |
71 | if revTag == "tip": | 71 | if revTag == "tip": |
72 | return True | 72 | return True |
@@ -126,14 +126,14 @@ class Hg(FetchMethod):
126 | 126 | ||
127 | return cmd | 127 | return cmd |
128 | 128 | ||
129 | def download(self, loc, ud, d): | 129 | def download(self, ud, d): |
130 | """Fetch url""" | 130 | """Fetch url""" |
131 | 131 | ||
132 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 132 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") |
133 | 133 | ||
134 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): | 134 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): |
135 | updatecmd = self._buildhgcommand(ud, d, "pull") | 135 | updatecmd = self._buildhgcommand(ud, d, "pull") |
136 | logger.info("Update " + loc) | 136 | logger.info("Update " + ud.url) |
137 | # update sources there | 137 | # update sources there |
138 | os.chdir(ud.moddir) | 138 | os.chdir(ud.moddir) |
139 | logger.debug(1, "Running %s", updatecmd) | 139 | logger.debug(1, "Running %s", updatecmd) |
@@ -142,7 +142,7 @@ class Hg(FetchMethod):
142 | 142 | ||
143 | else: | 143 | else: |
144 | fetchcmd = self._buildhgcommand(ud, d, "fetch") | 144 | fetchcmd = self._buildhgcommand(ud, d, "fetch") |
145 | logger.info("Fetch " + loc) | 145 | logger.info("Fetch " + ud.url) |
146 | # check out sources there | 146 | # check out sources there |
147 | bb.utils.mkdirhier(ud.pkgdir) | 147 | bb.utils.mkdirhier(ud.pkgdir) |
148 | os.chdir(ud.pkgdir) | 148 | os.chdir(ud.pkgdir) |
@@ -169,7 +169,7 @@ class Hg(FetchMethod):
169 | def supports_srcrev(self): | 169 | def supports_srcrev(self): |
170 | return True | 170 | return True |
171 | 171 | ||
172 | def _latest_revision(self, url, ud, d, name): | 172 | def _latest_revision(self, ud, d, name): |
173 | """ | 173 | """ |
174 | Compute tip revision for the url | 174 | Compute tip revision for the url |
175 | """ | 175 | """ |
@@ -177,10 +177,10 @@ class Hg(FetchMethod):
177 | output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) | 177 | output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) |
178 | return output.strip() | 178 | return output.strip() |
179 | 179 | ||
180 | def _build_revision(self, url, ud, d, name): | 180 | def _build_revision(self, ud, d, name): |
181 | return ud.revision | 181 | return ud.revision |
182 | 182 | ||
183 | def _revision_key(self, url, ud, d, name): | 183 | def _revision_key(self, ud, d, name): |
184 | """ | 184 | """ |
185 | Return a unique key for the url | 185 | Return a unique key for the url |
186 | """ | 186 | """ |
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 58bbe20327..5c4e42a942 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -34,7 +34,7 @@ from bb.fetch2 import FetchMethod, FetchError
34 | from bb.fetch2 import logger | 34 | from bb.fetch2 import logger |
35 | 35 | ||
36 | class Local(FetchMethod): | 36 | class Local(FetchMethod): |
37 | def supports(self, url, urldata, d): | 37 | def supports(self, urldata, d): |
38 | """ | 38 | """ |
39 | Check to see if a given url represents a local fetch. | 39 | Check to see if a given url represents a local fetch. |
40 | """ | 40 | """ |
@@ -47,7 +47,7 @@ class Local(FetchMethod):
47 | ud.basepath = ud.decodedurl | 47 | ud.basepath = ud.decodedurl |
48 | return | 48 | return |
49 | 49 | ||
50 | def localpath(self, url, urldata, d): | 50 | def localpath(self, urldata, d): |
51 | """ | 51 | """ |
52 | Return the local filename of a given url assuming a successful fetch. | 52 | Return the local filename of a given url assuming a successful fetch. |
53 | """ | 53 | """ |
@@ -75,14 +75,14 @@ class Local(FetchMethod):
75 | return dldirfile | 75 | return dldirfile |
76 | return newpath | 76 | return newpath |
77 | 77 | ||
78 | def need_update(self, url, ud, d): | 78 | def need_update(self, ud, d): |
79 | if url.find("*") != -1: | 79 | if ud.url.find("*") != -1: |
80 | return False | 80 | return False |
81 | if os.path.exists(ud.localpath): | 81 | if os.path.exists(ud.localpath): |
82 | return False | 82 | return False |
83 | return True | 83 | return True |
84 | 84 | ||
85 | def download(self, url, urldata, d): | 85 | def download(self, urldata, d): |
86 | """Fetch urls (no-op for Local method)""" | 86 | """Fetch urls (no-op for Local method)""" |
87 | # no need to fetch local files, we'll deal with them in place. | 87 | # no need to fetch local files, we'll deal with them in place. |
88 | if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): | 88 | if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): |
@@ -95,17 +95,17 @@ class Local(FetchMethod):
95 | locations.append(filesdir) | 95 | locations.append(filesdir) |
96 | locations.append(d.getVar("DL_DIR", True)) | 96 | locations.append(d.getVar("DL_DIR", True)) |
97 | 97 | ||
98 | msg = "Unable to find file " + url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) | 98 | msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) |
99 | raise FetchError(msg) | 99 | raise FetchError(msg) |
100 | 100 | ||
101 | return True | 101 | return True |
102 | 102 | ||
103 | def checkstatus(self, url, urldata, d): | 103 | def checkstatus(self, urldata, d): |
104 | """ | 104 | """ |
105 | Check the status of the url | 105 | Check the status of the url |
106 | """ | 106 | """ |
107 | if urldata.localpath.find("*") != -1: | 107 | if urldata.localpath.find("*") != -1: |
108 | logger.info("URL %s looks like a glob and was therefore not checked.", url) | 108 | logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url) |
109 | return True | 109 | return True |
110 | if os.path.exists(urldata.localpath): | 110 | if os.path.exists(urldata.localpath): |
111 | return True | 111 | return True |
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index 1a3a7bb56b..3d8779682f 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -20,7 +20,7 @@ class Osc(FetchMethod):
20 | """Class to fetch a module or modules from Opensuse build server | 20 | """Class to fetch a module or modules from Opensuse build server |
21 | repositories.""" | 21 | repositories.""" |
22 | 22 | ||
23 | def supports(self, url, ud, d): | 23 | def supports(self, ud, d): |
24 | """ | 24 | """ |
25 | Check to see if a given url can be fetched with osc. | 25 | Check to see if a given url can be fetched with osc. |
26 | """ | 26 | """ |
@@ -77,7 +77,7 @@ class Osc(FetchMethod):
77 | 77 | ||
78 | return osccmd | 78 | return osccmd |
79 | 79 | ||
80 | def download(self, loc, ud, d): | 80 | def download(self, ud, d): |
81 | """ | 81 | """ |
82 | Fetch url | 82 | Fetch url |
83 | """ | 83 | """ |
@@ -86,7 +86,7 @@ class Osc(FetchMethod):
86 | 86 | ||
87 | if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): | 87 | if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): |
88 | oscupdatecmd = self._buildosccommand(ud, d, "update") | 88 | oscupdatecmd = self._buildosccommand(ud, d, "update") |
89 | logger.info("Update "+ loc) | 89 | logger.info("Update "+ ud.url) |
90 | # update sources there | 90 | # update sources there |
91 | os.chdir(ud.moddir) | 91 | os.chdir(ud.moddir) |
92 | logger.debug(1, "Running %s", oscupdatecmd) | 92 | logger.debug(1, "Running %s", oscupdatecmd) |
@@ -94,7 +94,7 @@ class Osc(FetchMethod):
94 | runfetchcmd(oscupdatecmd, d) | 94 | runfetchcmd(oscupdatecmd, d) |
95 | else: | 95 | else: |
96 | oscfetchcmd = self._buildosccommand(ud, d, "fetch") | 96 | oscfetchcmd = self._buildosccommand(ud, d, "fetch") |
97 | logger.info("Fetch " + loc) | 97 | logger.info("Fetch " + ud.url) |
98 | # check out sources there | 98 | # check out sources there |
99 | bb.utils.mkdirhier(ud.pkgdir) | 99 | bb.utils.mkdirhier(ud.pkgdir) |
100 | os.chdir(ud.pkgdir) | 100 | os.chdir(ud.pkgdir) |
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index fc4074d5a3..ac1bfc7df8 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -37,7 +37,7 @@ from bb.fetch2 import logger
37 | from bb.fetch2 import runfetchcmd | 37 | from bb.fetch2 import runfetchcmd |
38 | 38 | ||
39 | class Perforce(FetchMethod): | 39 | class Perforce(FetchMethod): |
40 | def supports(self, url, ud, d): | 40 | def supports(self, ud, d): |
41 | return ud.type in ['p4'] | 41 | return ud.type in ['p4'] |
42 | 42 | ||
43 | def doparse(url, d): | 43 | def doparse(url, d): |
@@ -120,12 +120,12 @@ class Perforce(FetchMethod):
120 | 120 | ||
121 | ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) | 121 | ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) |
122 | 122 | ||
123 | def download(self, loc, ud, d): | 123 | def download(self, ud, d): |
124 | """ | 124 | """ |
125 | Fetch urls | 125 | Fetch urls |
126 | """ | 126 | """ |
127 | 127 | ||
128 | (host, depot, user, pswd, parm) = Perforce.doparse(loc, d) | 128 | (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d) |
129 | 129 | ||
130 | if depot.find('/...') != -1: | 130 | if depot.find('/...') != -1: |
131 | path = depot[:depot.find('/...')] | 131 | path = depot[:depot.find('/...')] |
@@ -158,7 +158,7 @@ class Perforce(FetchMethod):
158 | tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") | 158 | tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") |
159 | tmpfile = tmpfile.strip() | 159 | tmpfile = tmpfile.strip() |
160 | if not tmpfile: | 160 | if not tmpfile: |
161 | raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) | 161 | raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url) |
162 | 162 | ||
163 | if "label" in parm: | 163 | if "label" in parm: |
164 | depot = "%s@%s" % (depot, parm["label"]) | 164 | depot = "%s@%s" % (depot, parm["label"]) |
@@ -167,13 +167,13 @@ class Perforce(FetchMethod):
167 | depot = "%s@%s" % (depot, cset) | 167 | depot = "%s@%s" % (depot, cset) |
168 | 168 | ||
169 | os.chdir(tmpfile) | 169 | os.chdir(tmpfile) |
170 | logger.info("Fetch " + loc) | 170 | logger.info("Fetch " + ud.url) |
171 | logger.info("%s%s files %s", p4cmd, p4opt, depot) | 171 | logger.info("%s%s files %s", p4cmd, p4opt, depot) |
172 | p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot)) | 172 | p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot)) |
173 | p4file = [f.rstrip() for f in p4file.splitlines()] | 173 | p4file = [f.rstrip() for f in p4file.splitlines()] |
174 | 174 | ||
175 | if not p4file: | 175 | if not p4file: |
176 | raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc) | 176 | raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url) |
177 | 177 | ||
178 | count = 0 | 178 | count = 0 |
179 | 179 | ||
@@ -191,7 +191,7 @@ class Perforce(FetchMethod):
191 | 191 | ||
192 | if count == 0: | 192 | if count == 0: |
193 | logger.error() | 193 | logger.error() |
194 | raise FetchError("Fetch: No files gathered from the P4 fetch", loc) | 194 | raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url) |
195 | 195 | ||
196 | runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath]) | 196 | runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath]) |
197 | # cleanup | 197 | # cleanup |
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
index 8300da8c5a..21678eb7d9 100644
--- a/bitbake/lib/bb/fetch2/repo.py
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -31,7 +31,7 @@ from bb.fetch2 import runfetchcmd
31 | 31 | ||
32 | class Repo(FetchMethod): | 32 | class Repo(FetchMethod): |
33 | """Class to fetch a module or modules from repo (git) repositories""" | 33 | """Class to fetch a module or modules from repo (git) repositories""" |
34 | def supports(self, url, ud, d): | 34 | def supports(self, ud, d): |
35 | """ | 35 | """ |
36 | Check to see if a given url can be fetched with repo. | 36 | Check to see if a given url can be fetched with repo. |
37 | """ | 37 | """ |
@@ -53,7 +53,7 @@ class Repo(FetchMethod):
53 | 53 | ||
54 | ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) | 54 | ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) |
55 | 55 | ||
56 | def download(self, loc, ud, d): | 56 | def download(self, ud, d): |
57 | """Fetch url""" | 57 | """Fetch url""" |
58 | 58 | ||
59 | if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): | 59 | if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): |
@@ -91,8 +91,8 @@ class Repo(FetchMethod):
91 | def supports_srcrev(self): | 91 | def supports_srcrev(self): |
92 | return False | 92 | return False |
93 | 93 | ||
94 | def _build_revision(self, url, ud, d): | 94 | def _build_revision(self, ud, d): |
95 | return ud.manifest | 95 | return ud.manifest |
96 | 96 | ||
97 | def _want_sortable_revision(self, url, ud, d): | 97 | def _want_sortable_revision(self, ud, d): |
98 | return False | 98 | return False |
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index 5fbbcfdd90..8ea4ef2ff3 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -72,7 +72,7 @@ from bb.fetch2 import runfetchcmd
72 | class SFTP(FetchMethod): | 72 | class SFTP(FetchMethod): |
73 | """Class to fetch urls via 'sftp'""" | 73 | """Class to fetch urls via 'sftp'""" |
74 | 74 | ||
75 | def supports(self, url, ud, d): | 75 | def supports(self, ud, d): |
76 | """ | 76 | """ |
77 | Check to see if a given url can be fetched with sftp. | 77 | Check to see if a given url can be fetched with sftp. |
78 | """ | 78 | """ |
@@ -95,10 +95,10 @@ class SFTP(FetchMethod):
95 | 95 | ||
96 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 96 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) |
97 | 97 | ||
98 | def download(self, uri, ud, d): | 98 | def download(self, ud, d): |
99 | """Fetch urls""" | 99 | """Fetch urls""" |
100 | 100 | ||
101 | urlo = URI(uri) | 101 | urlo = URI(ud.url) |
102 | basecmd = 'sftp -oPasswordAuthentication=no' | 102 | basecmd = 'sftp -oPasswordAuthentication=no' |
103 | port = '' | 103 | port = '' |
104 | if urlo.port: | 104 | if urlo.port: |
@@ -124,6 +124,6 @@ class SFTP(FetchMethod):
124 | cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), | 124 | cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), |
125 | commands.mkarg(lpath)) | 125 | commands.mkarg(lpath)) |
126 | 126 | ||
127 | bb.fetch2.check_network_access(d, cmd, uri) | 127 | bb.fetch2.check_network_access(d, cmd, ud.url) |
128 | runfetchcmd(cmd, d) | 128 | runfetchcmd(cmd, d) |
129 | return True | 129 | return True |
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 8b5acbf6db..4ae979472c 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -72,8 +72,8 @@ __pattern__ = re.compile(r'''
72 | class SSH(FetchMethod): | 72 | class SSH(FetchMethod): |
73 | '''Class to fetch a module or modules via Secure Shell''' | 73 | '''Class to fetch a module or modules via Secure Shell''' |
74 | 74 | ||
75 | def supports(self, url, urldata, d): | 75 | def supports(self, urldata, d): |
76 | return __pattern__.match(url) != None | 76 | return __pattern__.match(urldata.url) != None |
77 | 77 | ||
78 | def supports_checksum(self, urldata): | 78 | def supports_checksum(self, urldata): |
79 | return False | 79 | return False |
@@ -89,10 +89,10 @@ class SSH(FetchMethod):
89 | host = m.group('host') | 89 | host = m.group('host') |
90 | urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path)) | 90 | urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path)) |
91 | 91 | ||
92 | def download(self, url, urldata, d): | 92 | def download(self, urldata, d): |
93 | dldir = d.getVar('DL_DIR', True) | 93 | dldir = d.getVar('DL_DIR', True) |
94 | 94 | ||
95 | m = __pattern__.match(url) | 95 | m = __pattern__.match(urldata.url) |
96 | path = m.group('path') | 96 | path = m.group('path') |
97 | host = m.group('host') | 97 | host = m.group('host') |
98 | port = m.group('port') | 98 | port = m.group('port') |
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
index ee3823f845..23631b43f4 100644
--- a/bitbake/lib/bb/fetch2/svk.py
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -37,7 +37,7 @@ from bb.fetch2 import runfetchcmd
37 | 37 | ||
38 | class Svk(FetchMethod): | 38 | class Svk(FetchMethod): |
39 | """Class to fetch a module or modules from svk repositories""" | 39 | """Class to fetch a module or modules from svk repositories""" |
40 | def supports(self, url, ud, d): | 40 | def supports(self, ud, d): |
41 | """ | 41 | """ |
42 | Check to see if a given url can be fetched with svk. | 42 | Check to see if a given url can be fetched with svk. |
43 | """ | 43 | """ |
@@ -54,14 +54,14 @@ class Svk(FetchMethod):
54 | 54 | ||
55 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | 55 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) |
56 | 56 | ||
57 | def need_update(self, url, ud, d): | 57 | def need_update(self, ud, d): |
58 | if ud.date == "now": | 58 | if ud.date == "now": |
59 | return True | 59 | return True |
60 | if not os.path.exists(ud.localpath): | 60 | if not os.path.exists(ud.localpath): |
61 | return True | 61 | return True |
62 | return False | 62 | return False |
63 | 63 | ||
64 | def download(self, loc, ud, d): | 64 | def download(self, ud, d): |
65 | """Fetch urls""" | 65 | """Fetch urls""" |
66 | 66 | ||
67 | svkroot = ud.host + ud.path | 67 | svkroot = ud.host + ud.path |
@@ -81,11 +81,11 @@ class Svk(FetchMethod):
81 | tmpfile = tmpfile.strip() | 81 | tmpfile = tmpfile.strip() |
82 | if not tmpfile: | 82 | if not tmpfile: |
83 | logger.error() | 83 | logger.error() |
84 | raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) | 84 | raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url) |
85 | 85 | ||
86 | # check out sources there | 86 | # check out sources there |
87 | os.chdir(tmpfile) | 87 | os.chdir(tmpfile) |
88 | logger.info("Fetch " + loc) | 88 | logger.info("Fetch " + ud.url) |
89 | logger.debug(1, "Running %s", svkcmd) | 89 | logger.debug(1, "Running %s", svkcmd) |
90 | runfetchcmd(svkcmd, d, cleanup = [tmpfile]) | 90 | runfetchcmd(svkcmd, d, cleanup = [tmpfile]) |
91 | 91 | ||
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 123aa136eb..8847461913 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -37,7 +37,7 @@ from bb.fetch2 import logger
37 | 37 | ||
38 | class Svn(FetchMethod): | 38 | class Svn(FetchMethod): |
39 | """Class to fetch a module or modules from svn repositories""" | 39 | """Class to fetch a module or modules from svn repositories""" |
40 | def supports(self, url, ud, d): | 40 | def supports(self, ud, d): |
41 | """ | 41 | """ |
42 | Check to see if a given url can be fetched with svn. | 42 | Check to see if a given url can be fetched with svn. |
43 | """ | 43 | """ |
@@ -112,14 +112,14 @@ class Svn(FetchMethod):
112 | 112 | ||
113 | return svncmd | 113 | return svncmd |
114 | 114 | ||
115 | def download(self, loc, ud, d): | 115 | def download(self, ud, d): |
116 | """Fetch url""" | 116 | """Fetch url""" |
117 | 117 | ||
118 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") | 118 | logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") |
119 | 119 | ||
120 | if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): | 120 | if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): |
121 | svnupdatecmd = self._buildsvncommand(ud, d, "update") | 121 | svnupdatecmd = self._buildsvncommand(ud, d, "update") |
122 | logger.info("Update " + loc) | 122 | logger.info("Update " + ud.url) |
123 | # update sources there | 123 | # update sources there |
124 | os.chdir(ud.moddir) | 124 | os.chdir(ud.moddir) |
125 | # We need to attempt to run svn upgrade first in case its an older working format | 125 | # We need to attempt to run svn upgrade first in case its an older working format |
@@ -132,7 +132,7 @@ class Svn(FetchMethod):
132 | runfetchcmd(svnupdatecmd, d) | 132 | runfetchcmd(svnupdatecmd, d) |
133 | else: | 133 | else: |
134 | svnfetchcmd = self._buildsvncommand(ud, d, "fetch") | 134 | svnfetchcmd = self._buildsvncommand(ud, d, "fetch") |
135 | logger.info("Fetch " + loc) | 135 | logger.info("Fetch " + ud.url) |
136 | # check out sources there | 136 | # check out sources there |
137 | bb.utils.mkdirhier(ud.pkgdir) | 137 | bb.utils.mkdirhier(ud.pkgdir) |
138 | os.chdir(ud.pkgdir) | 138 | os.chdir(ud.pkgdir) |
@@ -160,13 +160,13 @@ class Svn(FetchMethod):
160 | def supports_srcrev(self): | 160 | def supports_srcrev(self): |
161 | return True | 161 | return True |
162 | 162 | ||
163 | def _revision_key(self, url, ud, d, name): | 163 | def _revision_key(self, ud, d, name): |
164 | """ | 164 | """ |
165 | Return a unique key for the url | 165 | Return a unique key for the url |
166 | """ | 166 | """ |
167 | return "svn:" + ud.moddir | 167 | return "svn:" + ud.moddir |
168 | 168 | ||
169 | def _latest_revision(self, url, ud, d, name): | 169 | def _latest_revision(self, ud, d, name): |
170 | """ | 170 | """ |
171 | Return the latest upstream revision number | 171 | Return the latest upstream revision number |
172 | """ | 172 | """ |
@@ -180,12 +180,12 @@ class Svn(FetchMethod):
180 | 180 | ||
181 | return revision | 181 | return revision |
182 | 182 | ||
183 | def sortable_revision(self, url, ud, d, name): | 183 | def sortable_revision(self, ud, d, name): |
184 | """ | 184 | """ |
185 | Return a sortable revision number which in our case is the revision number | 185 | Return a sortable revision number which in our case is the revision number |
186 | """ | 186 | """ |
187 | 187 | ||
188 | return False, self._build_revision(url, ud, d) | 188 | return False, self._build_revision(ud, d) |
189 | 189 | ||
190 | def _build_revision(self, url, ud, d): | 190 | def _build_revision(self, ud, d): |
191 | return ud.revision | 191 | return ud.revision |
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 131016ce89..00732019ed 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -37,7 +37,7 @@ from bb.fetch2 import runfetchcmd
37 | 37 | ||
38 | class Wget(FetchMethod): | 38 | class Wget(FetchMethod): |
39 | """Class to fetch urls via 'wget'""" | 39 | """Class to fetch urls via 'wget'""" |
40 | def supports(self, url, ud, d): | 40 | def supports(self, ud, d): |
41 | """ | 41 | """ |
42 | Check to see if a given url can be fetched with wget. | 42 | Check to see if a given url can be fetched with wget. |
43 | """ | 43 | """ |
@@ -58,7 +58,7 @@ class Wget(FetchMethod):
58 | 58 | ||
59 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 59 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) |
60 | 60 | ||
61 | def download(self, uri, ud, d, checkonly = False): | 61 | def download(self, ud, d, checkonly = False): |
62 | """Fetch urls""" | 62 | """Fetch urls""" |
63 | 63 | ||
64 | basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" | 64 | basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" |
@@ -76,7 +76,7 @@ class Wget(FetchMethod):
76 | else: | 76 | else: |
77 | fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'") | 77 | fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'") |
78 | 78 | ||
79 | uri = uri.split(";")[0] | 79 | uri = ud.url.split(";")[0] |
80 | 80 | ||
81 | fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) | 81 | fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) |
82 | fetchcmd = fetchcmd.replace("${FILE}", ud.basename) | 82 | fetchcmd = fetchcmd.replace("${FILE}", ud.basename) |
@@ -93,5 +93,5 @@ class Wget(FetchMethod):
93 | 93 | ||
94 | return True | 94 | return True |
95 | 95 | ||
96 | def checkstatus(self, uri, ud, d): | 96 | def checkstatus(self, ud, d): |
97 | return self.download(uri, ud, d, True) | 97 | return self.download(ud, d, True) |