author    Richard Purdie <richard@openedhand.com>    2006-11-16 15:02:15 +0000
committer Richard Purdie <richard@openedhand.com>    2006-11-16 15:02:15 +0000
commit    306b7c7a9757ead077363074e7bbac2e5c03e7c5 (patch)
tree      6935017a9af749c46816881c86258f514384ba1c /bitbake/lib/bb/fetch
parent    65930a38e415ae4a0182e1cea1be838e0ada50ee (diff)
download  poky-306b7c7a9757ead077363074e7bbac2e5c03e7c5.tar.gz

bitbake: Upgrade from 1.4 -> 1.7.4ish

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@863 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake/lib/bb/fetch')
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py   167
-rw-r--r--  bitbake/lib/bb/fetch/cvs.py        255
-rw-r--r--  bitbake/lib/bb/fetch/git.py        144
-rw-r--r--  bitbake/lib/bb/fetch/local.py       18
-rw-r--r--  bitbake/lib/bb/fetch/perforce.py   213
-rw-r--r--  bitbake/lib/bb/fetch/ssh.py         94
-rw-r--r--  bitbake/lib/bb/fetch/svk.py        160
-rw-r--r--  bitbake/lib/bb/fetch/svn.py        203
-rw-r--r--  bitbake/lib/bb/fetch/wget.py       152
9 files changed, 728 insertions, 678 deletions
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 7ab0590765..24aebc41ca 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -38,13 +38,16 @@ class NoMethodError(Exception):
 class MissingParameterError(Exception):
     """Exception raised when a fetch method is missing a critical parameter in the url"""
 
+class ParameterError(Exception):
+    """Exception raised when a url cannot be proccessed due to invalid parameters."""
+
 class MD5SumError(Exception):
     """Exception raised when a MD5SUM of a file does not match the expected one"""
 
 def uri_replace(uri, uri_find, uri_replace, d):
-#   bb.note("uri_replace: operating on %s" % uri)
+#   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
     if not uri or not uri_find or not uri_replace:
-        bb.debug(1, "uri_replace: passed an undefined value, not replacing")
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
     uri_decoded = list(bb.decodeurl(uri))
     uri_find_decoded = list(bb.decodeurl(uri_find))
     uri_replace_decoded = list(bb.decodeurl(uri_replace))
@@ -62,9 +65,9 @@ def uri_replace(uri, uri_find, uri_replace, d):
                     localfn = bb.fetch.localpath(uri, d)
                     if localfn:
                         result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
-#                   bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
+#                   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
             else:
-#               bb.note("uri_replace: no match")
+#               bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
                 return uri
 #        else:
 #            for j in i.keys():
@@ -72,62 +75,94 @@ def uri_replace(uri, uri_find, uri_replace, d):
     return bb.encodeurl(result_decoded)
 
 methods = []
+urldata = {}
 
 def init(urls = [], d = None):
     if d == None:
-        bb.debug(2,"BUG init called with None as data object!!!")
+        bb.msg.debug(2, bb.msg.domain.Fetcher, "BUG init called with None as data object!!!")
         return
 
     for m in methods:
         m.urls = []
 
     for u in urls:
+        ud = initdata(u, d)
+        if ud.method:
+            ud.method.urls.append(u)
+
+def initdata(url, d):
+    if url not in urldata:
+        ud = FetchData()
+        (ud.type, ud.host, ud.path, ud.user, ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
+        ud.date = Fetch.getSRCDate(d)
         for m in methods:
-            m.data = d
-            if m.supports(u, d):
-                m.urls.append(u)
+            if m.supports(url, ud, d):
+                ud.localpath = m.localpath(url, ud, d)
+                ud.md5 = ud.localpath + '.md5'
+                # if user sets localpath for file, use it instead.
+                if "localpath" in ud.parm:
+                    ud.localpath = ud.parm["localpath"]
+                ud.method = m
+                break
+        urldata[url] = ud
+    return urldata[url]
 
 def go(d):
     """Fetch all urls"""
     for m in methods:
-        if m.urls:
-            m.go(d)
+        for u in m.urls:
+            ud = urldata[u]
+            if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(urldata[u].md5):
+                # File already present along with md5 stamp file
+                # Touch md5 file to show activity
+                os.utime(ud.md5, None)
+                continue
+            # RP - is olddir needed?
+            # olddir = os.path.abspath(os.getcwd())
+            m.go(u, ud , d)
+            # os.chdir(olddir)
+            if ud.localfile and not m.forcefetch(u, ud, d):
+                Fetch.write_md5sum(u, ud, d)
 
 def localpaths(d):
     """Return a list of the local filenames, assuming successful fetch"""
     local = []
     for m in methods:
         for u in m.urls:
-            local.append(m.localpath(u, d))
+            local.append(urldata[u].localpath)
     return local
 
 def localpath(url, d):
-    for m in methods:
-        if m.supports(url, d):
-            return m.localpath(url, d)
+    ud = initdata(url, d)
+    if ud.method:
+        return ud.localpath
     return url
 
+class FetchData(object):
+    """Class for fetcher variable store"""
+    def __init__(self):
+        self.localfile = ""
+
+
 class Fetch(object):
     """Base class for 'fetch'ing data"""
 
     def __init__(self, urls = []):
         self.urls = []
-        for url in urls:
-            if self.supports(bb.decodeurl(url), d) is 1:
-                self.urls.append(url)
 
-    def supports(url, d):
-        """Check to see if this fetch class supports a given url.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
+    def supports(self, url, urldata, d):
+        """
+        Check to see if this fetch class supports a given url.
         """
         return 0
-    supports = staticmethod(supports)
 
-    def localpath(url, d):
-        """Return the local filename of a given url assuming a successful fetch.
+    def localpath(self, url, urldata, d):
+        """
+        Return the local filename of a given url assuming a successful fetch.
+        Can also setup variables in urldata for use in go (saving code duplication
+        and duplicate code execution)
         """
         return url
-    localpath = staticmethod(localpath)
 
     def setUrls(self, urls):
         self.__urls = urls
@@ -137,16 +172,17 @@ class Fetch(object):
 
     urls = property(getUrls, setUrls, None, "Urls property")
 
-    def setData(self, data):
-        self.__data = data
-
-    def getData(self):
-        return self.__data
-
-    data = property(getData, setData, None, "Data property")
+    def forcefetch(self, url, urldata, d):
+        """
+        Force a fetch, even if localpath exists?
+        """
+        return False
 
-    def go(self, urls = []):
-        """Fetch urls"""
+    def go(self, url, urldata, d):
+        """
+        Fetch urls
+        Assumes localpath was called first
+        """
         raise NoMethodError("Missing implementation for url")
 
     def getSRCDate(d):
@@ -155,7 +191,12 @@ class Fetch(object):
 
         d the bb.data module
         """
-        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1 )
+        pn = data.getVar("PN", d, 1)
+
+        if pn:
+            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("DATE", d, 1)
+
+        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
     getSRCDate = staticmethod(getSRCDate)
 
     def try_mirror(d, tarfn):
@@ -168,6 +209,11 @@ class Fetch(object):
         d Is a bb.data instance
         tarfn is the name of the tarball
         """
+        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
+        if os.access(tarpath, os.R_OK):
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
+            return True
+
         pn = data.getVar('PN', d, True)
         src_tarball_stash = None
         if pn:
@@ -176,36 +222,45 @@ class Fetch(object):
         for stash in src_tarball_stash:
             fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
             uri = stash + tarfn
-            bb.note("fetch " + uri)
+            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
             ret = os.system(fetchcmd)
             if ret == 0:
-                bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
+                bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn)
                 return True
         return False
     try_mirror = staticmethod(try_mirror)
 
-    def check_for_tarball(d, tarfn, dldir, date):
+    def verify_md5sum(ud, got_sum):
         """
-        Check for a local copy then check the tarball stash.
-        Both checks are skipped if date == 'now'.
-
-        d Is a bb.data instance
-        tarfn is the name of the tarball
-        date is the SRCDATE
+        Verify the md5sum we wanted with the one we got
         """
-        if "now" != date:
-            dl = os.path.join(dldir, tarfn)
-            if os.access(dl, os.R_OK):
-                bb.debug(1, "%s already exists, skipping checkout." % tarfn)
-                return True
-
-        # try to use the tarball stash
-        if Fetch.try_mirror(d, tarfn):
+        wanted_sum = None
+        if 'md5sum' in ud.parm:
+            wanted_sum = ud.parm['md5sum']
+        if not wanted_sum:
             return True
-        return False
-    check_for_tarball = staticmethod(check_for_tarball)
 
+        return wanted_sum == got_sum
+    verify_md5sum = staticmethod(verify_md5sum)
+
+    def write_md5sum(url, ud, d):
+        if bb.which(data.getVar('PATH', d), 'md5sum'):
+            try:
+                md5pipe = os.popen('md5sum ' + ud.localpath)
+                md5data = (md5pipe.readline().split() or [ "" ])[0]
+                md5pipe.close()
+            except OSError:
+                md5data = ""
+
+        # verify the md5sum
+        if not Fetch.verify_md5sum(ud, md5data):
+            raise MD5SumError(url)
+
+        md5out = file(ud.md5, 'w')
+        md5out.write(md5data)
+        md5out.close()
+    write_md5sum = staticmethod(write_md5sum)
 
 import cvs
 import git
@@ -214,6 +269,7 @@ import svn
 import wget
 import svk
 import ssh
+import perforce
 
 methods.append(cvs.Cvs())
 methods.append(git.Git())
@@ -222,3 +278,4 @@ methods.append(svn.Svn())
 methods.append(wget.Wget())
 methods.append(svk.Svk())
 methods.append(ssh.SSH())
+methods.append(perforce.Perforce())
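For orientation, the reworked API above is driven roughly as follows; a minimal sketch, assuming `d` is an initialised bb.data datastore and SRC_URI has already been expanded (the helper name fetch_all is invented for illustration, not part of the commit):

    import bb.fetch
    from bb import data

    def fetch_all(d):
        # Split SRC_URI, let init() build a FetchData entry per URL (bb.fetch.urldata),
        # download anything missing via go(), then return the cached local paths.
        src_uri = (data.getVar('SRC_URI', d, 1) or "").split()
        bb.fetch.init(src_uri, d)
        bb.fetch.go(d)                  # skips URLs whose file and .md5 stamp already exist
        return bb.fetch.localpaths(d)   # local filenames under DL_DIR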
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index 0b2477560a..3bdac177eb 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -33,164 +33,119 @@ from bb.fetch import FetchError
 from bb.fetch import MissingParameterError
 
 class Cvs(Fetch):
-    """Class to fetch a module or modules from cvs repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with cvs.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['cvs', 'pserver']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-#           if user overrides local path, use it.
-            return parm["localpath"]
-
-        if not "module" in parm:
-            raise MissingParameterError("cvs method needs a 'module' parameter")
-        else:
-            module = parm["module"]
-        if 'tag' in parm:
-            tag = parm['tag']
-        else:
-            tag = ""
-        if 'date' in parm:
-            date = parm['date']
-        else:
-            if not tag:
-                date = Fetch.getSRCDate(d)
-            else:
-                date = ""
-
-        return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
-            if not "module" in parm:
-                raise MissingParameterError("cvs method needs a 'module' parameter")
-            else:
-                module = parm["module"]
-
-            dlfile = self.localpath(loc, localdata)
-            dldir = data.getVar('DL_DIR', localdata, 1)
-#           if local path contains the cvs
-#           module, consider the dir above it to be the
-#           download directory
-#           pos = dlfile.find(module)
-#           if pos:
-#               dldir = dlfile[:pos]
-#           else:
-#               dldir = os.path.dirname(dlfile)
-
-#           setup cvs options
-            options = []
-            if 'tag' in parm:
-                tag = parm['tag']
-            else:
-                tag = ""
-
-            if 'date' in parm:
-                date = parm['date']
-            else:
-                if not tag:
-                    date = Fetch.getSRCDate(d)
-                else:
-                    date = ""
-
-            if "method" in parm:
-                method = parm["method"]
-            else:
-                method = "pserver"
-
-            if "localdir" in parm:
-                localdir = parm["localdir"]
-            else:
-                localdir = module
-
-            cvs_rsh = None
-            if method == "ext":
-                if "rsh" in parm:
-                    cvs_rsh = parm["rsh"]
-
-            tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
-            data.setVar('TARFILES', dlfile, localdata)
-            data.setVar('TARFN', tarfn, localdata)
-
-            if Fetch.check_for_tarball(d, tarfn, dldir, date):
-                continue
-
-            if date:
-                options.append("-D %s" % date)
-            if tag:
-                options.append("-r %s" % tag)
-
-            olddir = os.path.abspath(os.getcwd())
-            os.chdir(data.expand(dldir, localdata))
-
-#           setup cvsroot
-            if method == "dir":
-                cvsroot = path
-            else:
-                cvsroot = ":" + method + ":" + user
-                if pswd:
-                    cvsroot += ":" + pswd
-                cvsroot += "@" + host + ":" + path
-
-            data.setVar('CVSROOT', cvsroot, localdata)
-            data.setVar('CVSCOOPTS', " ".join(options), localdata)
-            data.setVar('CVSMODULE', module, localdata)
-            cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
-            cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
-            if cvs_rsh:
-                cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
-                cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
-#           create module directory
-            bb.debug(2, "Fetch: checking for module directory")
-            pkg=data.expand('${PN}', d)
-            pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
-            moddir=os.path.join(pkgdir,localdir)
-            if os.access(os.path.join(moddir,'CVS'), os.R_OK):
-                bb.note("Update " + loc)
-#               update sources there
-                os.chdir(moddir)
-                myret = os.system(cvsupdatecmd)
-            else:
-                bb.note("Fetch " + loc)
-#               check out sources there
-                bb.mkdirhier(pkgdir)
-                os.chdir(pkgdir)
-                bb.debug(1, "Running %s" % cvscmd)
-                myret = os.system(cvscmd)
-
-            if myret != 0 or not os.access(moddir, os.R_OK):
-                try:
-                    os.rmdir(moddir)
-                except OSError:
-                    pass
-                raise FetchError(module)
-
-            os.chdir(moddir)
-            os.chdir('..')
-#           tar them up to a defined filename
-            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
-            if myret != 0:
-                try:
-                    os.unlink(tarfn)
-                except OSError:
-                    pass
-            os.chdir(olddir)
-            del localdata
+    """
+    Class to fetch a module or modules from cvs repositories
+    """
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['cvs', 'pserver']
+
+    def localpath(self, url, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError("cvs method needs a 'module' parameter")
+        ud.module = ud.parm["module"]
+
+        ud.tag = ""
+        if 'tag' in ud.parm:
+            ud.tag = ud.parm['tag']
+
+        # Override the default date in certain cases
+        if 'date' in ud.parm:
+            ud.date = ud.parm['date']
+        elif ud.tag:
+            ud.date = ""
+
+        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def forcefetch(self, url, ud, d):
+        if (ud.date == "now"):
+            return True
+        return False
+
+    def go(self, loc, ud, d):
+
+        # try to use the tarball stash
+        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
+            return
+
+        method = "pserver"
+        if "method" in ud.parm:
+            method = ud.parm["method"]
+
+        localdir = ud.module
+        if "localdir" in ud.parm:
+            localdir = ud.parm["localdir"]
+
+        cvs_rsh = None
+        if method == "ext":
+            if "rsh" in ud.parm:
+                cvs_rsh = ud.parm["rsh"]
+
+        if method == "dir":
+            cvsroot = ud.path
+        else:
+            cvsroot = ":" + method + ":" + ud.user
+            if ud.pswd:
+                cvsroot += ":" + ud.pswd
+            cvsroot += "@" + ud.host + ":" + ud.path
+
+        options = []
+        if ud.date:
+            options.append("-D %s" % ud.date)
+        if ud.tag:
+            options.append("-r %s" % ud.tag)
+
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+
+        data.setVar('CVSROOT', cvsroot, localdata)
+        data.setVar('CVSCOOPTS', " ".join(options), localdata)
+        data.setVar('CVSMODULE', ud.module, localdata)
+        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
+        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
+
+        if cvs_rsh:
+            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+        # create module directory
+        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
+        pkg = data.expand('${PN}', d)
+        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+        moddir = os.path.join(pkgdir,localdir)
+        if os.access(os.path.join(moddir,'CVS'), os.R_OK):
+            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+            # update sources there
+            os.chdir(moddir)
+            myret = os.system(cvsupdatecmd)
+        else:
+            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+            # check out sources there
+            bb.mkdirhier(pkgdir)
+            os.chdir(pkgdir)
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
+            myret = os.system(cvscmd)
+
+        if myret != 0 or not os.access(moddir, os.R_OK):
+            try:
+                os.rmdir(moddir)
+            except OSError:
+                pass
+            raise FetchError(ud.module)
+
+        os.chdir(moddir)
+        os.chdir('..')
+        # tar them up to a defined filename
+        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))
+        if myret != 0:
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise FetchError(ud.module)
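As a quick illustration of the tarball name the new localpath() caches in ud.localfile (the URL and values below are invented, not from the commit):

    # cvs://anonymous@cvs.example.org/cvsroot;module=foo/bar;tag=RELEASE_1
    module, host, tag, date = "foo/bar", "cvs.example.org", "RELEASE_1", ""
    localfile = '%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date)
    # -> foo.bar_cvs.example.org_RELEASE_1_.tar.gz, stored under DL_DIR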
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 49235c141e..75a7629223 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -37,7 +37,7 @@ def prunedir(topdir):
 
 def rungitcmd(cmd,d):
 
-    bb.debug(1, "Running %s" % cmd)
+    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
 
     # Need to export PATH as git is likely to be in metadata paths
     # rather than host provided
@@ -48,108 +48,80 @@ def rungitcmd(cmd,d):
     if myret != 0:
         raise FetchError("Git: %s failed" % pathcmd)
 
-def gettag(parm):
-    if 'tag' in parm:
-        tag = parm['tag']
-    else:
-        tag = ""
-    if not tag:
-        tag = "master"
-
-    return tag
-
-def getprotocol(parm):
-    if 'protocol' in parm:
-        proto = parm['protocol']
-    else:
-        proto = ""
-    if not proto:
-        proto = "rsync"
-
-    return proto
-
-def localfile(url, d):
-    """Return the filename to cache the checkout in"""
-    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-
-    #if user sets localpath for file, use it instead.
-    if "localpath" in parm:
-        return parm["localpath"]
-
-    tag = gettag(parm)
-
-    return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag), d)
-
 class Git(Fetch):
     """Class to fetch a module or modules from git repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with cvs.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['git']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-
-        return os.path.join(data.getVar("DL_DIR", d, 1), localfile(url, d))
-
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
-
-            tag = gettag(parm)
-            proto = getprotocol(parm)
-
-            gitsrcname = '%s%s' % (host, path.replace('/', '.'))
-
-            repofilename = 'git_%s.tar.gz' % (gitsrcname)
-            repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
-            repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
-
-            coname = '%s' % (tag)
-            codir = os.path.join(repodir, coname)
-
-            cofile = self.localpath(loc, d)
-
-            # tag=="master" must always update
-            if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
-                bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile)
-                continue
-
-            if not os.path.exists(repodir):
-                if Fetch.try_mirror(d, repofilename):
-                    bb.mkdirhier(repodir)
-                    os.chdir(repodir)
-                    rungitcmd("tar -xzf %s" % (repofile),d)
-                else:
-                    rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
-
-            os.chdir(repodir)
-            rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
-            rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
-            rungitcmd("git prune-packed", d)
-            # old method of downloading tags
-            #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
-
-            os.chdir(repodir)
-            bb.note("Creating tarball of git repository")
-            rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
-
-            if os.path.exists(codir):
-                prunedir(codir)
-
-            bb.mkdirhier(codir)
-            os.chdir(repodir)
-            rungitcmd("git read-tree %s" % (tag),d)
-            rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
-
-            os.chdir(codir)
-            bb.note("Creating tarball of git checkout")
-            rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['git']
+
+    def localpath(self, url, ud, d):
+
+        ud.proto = "rsync"
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+
+        ud.tag = "master"
+        if 'tag' in ud.parm:
+            ud.tag = ud.parm['tag']
+
+        ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def forcefetch(self, url, ud, d):
+        # tag=="master" must always update
+        if (ud.tag == "master"):
+            return True
+        return False
+
+    def go(self, loc, ud, d):
+        """Fetch url"""
+
+        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
+            return
+
+        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
+
+        repofilename = 'git_%s.tar.gz' % (gitsrcname)
+        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
+        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
+
+        coname = '%s' % (ud.tag)
+        codir = os.path.join(repodir, coname)
+
+        if not os.path.exists(repodir):
+            if Fetch.try_mirror(d, repofilename):
+                bb.mkdirhier(repodir)
+                os.chdir(repodir)
+                rungitcmd("tar -xzf %s" % (repofile),d)
+            else:
+                rungitcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir),d)
+
+        os.chdir(repodir)
+        rungitcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+        rungitcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path),d)
+        rungitcmd("git prune-packed", d)
+        rungitcmd("git pack-redundant --all | xargs -r rm", d)
+        # Remove all but the .git directory
+        rungitcmd("rm * -Rf", d)
+        # old method of downloading tags
+        #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")),d)
+
+        os.chdir(repodir)
+        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
+        rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
+
+        if os.path.exists(codir):
+            prunedir(codir)
+
+        bb.mkdirhier(codir)
+        os.chdir(repodir)
+        rungitcmd("git read-tree %s" % (ud.tag),d)
+        rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
+
+        os.chdir(codir)
+        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
+        rungitcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ),d)
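A small sketch of the naming and update policy the git fetcher now uses (URL and tag below are hypothetical):

    # git://git.example.org/repo.git;protocol=git;tag=v1.0
    host, path, tag = "git.example.org", "/repo.git", "v1.0"
    localfile = 'git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), tag)
    # -> git_git.example.org.repo.git_v1.0.tar.gz
    # Without a tag= parameter the tag defaults to "master" and forcefetch()
    # returns True, so such checkouts are refreshed on every fetch.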
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 51938f823e..5224976704 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -31,15 +31,13 @@ from bb import data
 from bb.fetch import Fetch
 
 class Local(Fetch):
-    def supports(url, d):
-        """Check to see if a given url can be fetched in the local filesystem.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
+    def supports(self, url, urldata, d):
         """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['file','patch']
-    supports = staticmethod(supports)
+        Check to see if a given url can be fetched with cvs.
+        """
+        return urldata.type in ['file','patch']
 
-    def localpath(url, d):
+    def localpath(self, url, urldata, d):
         """Return the local filename of a given url assuming a successful fetch.
         """
         path = url.split("://")[1]
@@ -52,10 +50,10 @@ class Local(Fetch):
         filesdir = data.getVar('FILESDIR', d, 1)
         if filesdir:
             newpath = os.path.join(filesdir, path)
+        # We don't set localfile as for this fetcher the file is already local!
         return newpath
-    localpath = staticmethod(localpath)
 
-    def go(self, urls = []):
+    def go(self, url, urldata, d):
         """Fetch urls (no-op for Local method)"""
-#       no need to fetch local files, we'll deal with them in place.
+        # no need to fetch local files, we'll deal with them in place.
         return 1
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
new file mode 100644
index 0000000000..88acf69951
--- /dev/null
+++ b/bitbake/lib/bb/fetch/perforce.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+Copyright (C) 2003, 2004  Chris Larson
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation; either version 2 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+Place, Suite 330, Boston, MA 02111-1307 USA.
+
+Based on functions from the base bb module, Copyright 2003 Holger Schurig
+"""
+
+import os, re
+import bb
+from bb import data
+from bb.fetch import Fetch
+from bb.fetch import FetchError
+from bb.fetch import MissingParameterError
+
+class Perforce(Fetch):
+    def supports(self, url, ud, d):
+        return ud.type in ['p4']
+
+    def doparse(url,d):
+        parm=[]
+        path = url.split("://")[1]
+        delim = path.find("@");
+        if delim != -1:
+            (user,pswd,host,port) = path.split('@')[0].split(":")
+            path = path.split('@')[1]
+        else:
+            (host,port) = data.getVar('P4PORT', d).split(':')
+            user = ""
+            pswd = ""
+
+        if path.find(";") != -1:
+            keys=[]
+            values=[]
+            plist = path.split(';')
+            for item in plist:
+                if item.count('='):
+                    (key,value) = item.split('=')
+                    keys.append(key)
+                    values.append(value)
+
+            parm = dict(zip(keys,values))
+        path = "//" + path.split(';')[0]
+        host += ":%s" % (port)
+        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        return host,path,user,pswd,parm
+    doparse = staticmethod(doparse)
+
+    def getcset(d, depot,host,user,pswd,parm):
+        if "cset" in parm:
+            return parm["cset"];
+        if user:
+            data.setVar('P4USER', user, d)
+        if pswd:
+            data.setVar('P4PASSWD', pswd, d)
+        if host:
+            data.setVar('P4PORT', host, d)
+
+        p4date = data.getVar("P4DATE", d, 1)
+        if "revision" in parm:
+            depot += "#%s" % (parm["revision"])
+        elif "label" in parm:
+            depot += "@%s" % (parm["label"])
+        elif p4date:
+            depot += "@%s" % (p4date)
+
+        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot))
+        p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot))
+        cset = p4file.readline().strip()
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
+        if not cset:
+            return -1
+
+        return cset.split(' ')[1]
+    getcset = staticmethod(getcset)
+
+    def localpath(self, url, ud, d):
+
+        (host,path,user,pswd,parm) = Perforce.doparse(url,d)
+
+        # If a label is specified, we use that as our filename
+
+        if "label" in parm:
+            ud.localfile = "%s.tar.gz" % (parm["label"])
+            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+        base = path
+        which = path.find('/...')
+        if which != -1:
+            base = path[:which]
+
+        if base[0] == "/":
+            base = base[1:]
+
+        cset = Perforce.getcset(d, path, host, user, pswd, parm)
+
+        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
+
+    def go(self, loc, ud, d):
+        """
+        Fetch urls
+        """
+
+        # try to use the tarball stash
+        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
+            return
+
+        (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)
+
+        if depot.find('/...') != -1:
+            path = depot[:depot.find('/...')]
+        else:
+            path = depot
+
+        if "module" in parm:
+            module = parm["module"]
+        else:
+            module = os.path.basename(path)
+
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+
+        # Get the p4 command
+        if user:
+            data.setVar('P4USER', user, localdata)
+
+        if pswd:
+            data.setVar('P4PASSWD', pswd, localdata)
+
+        if host:
+            data.setVar('P4PORT', host, localdata)
+
+        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
+
+        # create temp directory
+        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
+        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+        tmpfile = tmppipe.readline().strip()
+        if not tmpfile:
+            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+            raise FetchError(module)
+
+        if "label" in parm:
+            depot = "%s@%s" % (depot,parm["label"])
+        else:
+            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+            depot = "%s@%s" % (depot,cset)
+
+        os.chdir(tmpfile)
+        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+        bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
+        p4file = os.popen("%s files %s" % (p4cmd, depot))
+
+        if not p4file:
+            bb.error("Fetch: unable to get the P4 files from %s" % (depot))
+            raise FetchError(module)
+
+        count = 0
+
+        for file in p4file:
+            list = file.split()
+
+            if list[2] == "delete":
+                continue
+
+            dest = list[0][len(path)+1:]
+            where = dest.find("#")
+
+            os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0]))
+            count = count + 1
+
+        if count == 0:
+            bb.error("Fetch: No files gathered from the P4 fetch")
+            raise FetchError(module)
+
+        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
+        if myret != 0:
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise FetchError(module)
+        # cleanup
+        os.system('rm -rf %s' % tmpfile)
+
+
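To illustrate the parameter handling in doparse() above, here is the ';key=value' splitting on its own; the depot path is invented, not taken from the commit:

    path = "depot/project/...;revision=42;module=project"
    parm = {}
    if path.find(";") != -1:
        keys, values = [], []
        for item in path.split(';'):
            if item.count('='):
                (key, value) = item.split('=')
                keys.append(key)
                values.append(value)
        parm = dict(zip(keys, values))
    path = "//" + path.split(';')[0]
    # path -> //depot/project/...    parm -> {'revision': '42', 'module': 'project'}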
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
index 57874d5ba9..e5f69e33e7 100644
--- a/bitbake/lib/bb/fetch/ssh.py
+++ b/bitbake/lib/bb/fetch/ssh.py
@@ -64,59 +64,55 @@ __pattern__ = re.compile(r'''
 class SSH(Fetch):
     '''Class to fetch a module or modules via Secure Shell'''
 
-    def supports(self, url, d):
+    def supports(self, url, urldata, d):
         return __pattern__.match(url) != None
 
-    def localpath(self, url, d):
+    def localpath(self, url, urldata, d):
         m = __pattern__.match(url)
         path = m.group('path')
         host = m.group('host')
-        lpath = os.path.join(data.getVar('DL_DIR', d, 1), host, os.path.basename(path))
+        lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
         return lpath
 
-    def go(self, d, urls = []):
-        if not urls:
-            urls = self.urls
-
-        for url in urls:
-            dldir = data.getVar('DL_DIR', d, 1)
-
-            m = __pattern__.match(url)
-            path = m.group('path')
-            host = m.group('host')
-            port = m.group('port')
-            user = m.group('user')
-            password = m.group('pass')
-
-            ldir = os.path.join(dldir, host)
-            lpath = os.path.join(ldir, os.path.basename(path))
-
-            if not os.path.exists(ldir):
-                os.makedirs(ldir)
-
-            if port:
-                port = '-P %s' % port
-            else:
-                port = ''
-
-            if user:
-                fr = user
-                if password:
-                    fr += ':%s' % password
-                fr += '@%s' % host
-            else:
-                fr = host
-            fr += ':%s' % path
-
-
-            import commands
-            cmd = 'scp -B -r %s %s %s/' % (
-                port,
-                commands.mkarg(fr),
-                commands.mkarg(ldir)
-            )
-
-            (exitstatus, output) = commands.getstatusoutput(cmd)
-            if exitstatus != 0:
-                print output
-                raise FetchError('Unable to fetch %s' % url)
+    def go(self, url, urldata, d):
+        dldir = data.getVar('DL_DIR', d, 1)
+
+        m = __pattern__.match(url)
+        path = m.group('path')
+        host = m.group('host')
+        port = m.group('port')
+        user = m.group('user')
+        password = m.group('pass')
+
+        ldir = os.path.join(dldir, host)
+        lpath = os.path.join(ldir, os.path.basename(path))
+
+        if not os.path.exists(ldir):
+            os.makedirs(ldir)
+
+        if port:
+            port = '-P %s' % port
+        else:
+            port = ''
+
+        if user:
+            fr = user
+            if password:
+                fr += ':%s' % password
+            fr += '@%s' % host
+        else:
+            fr = host
+        fr += ':%s' % path
+
+
+        import commands
+        cmd = 'scp -B -r %s %s %s/' % (
+            port,
+            commands.mkarg(fr),
+            commands.mkarg(ldir)
+        )
+
+        (exitstatus, output) = commands.getstatusoutput(cmd)
+        if exitstatus != 0:
+            print output
+            raise FetchError('Unable to fetch %s' % url)
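Roughly, the command the ssh fetcher assembles looks like this; a sketch with invented values (commands is the Python 2 module used above, and mkarg() shell-quotes its argument):

    import commands
    port = '-P 2222'
    fr = 'user@host.example.org:/srv/files/foo.tar.gz'
    ldir = '/downloads/host.example.org'
    cmd = 'scp -B -r %s %s %s/' % (port, commands.mkarg(fr), commands.mkarg(ldir))
    # run with commands.getstatusoutput(cmd); a non-zero exit status raises FetchError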
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index 19103213cd..29270ab3d8 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -42,112 +42,76 @@ from bb.fetch import MissingParameterError
 
 class Svk(Fetch):
     """Class to fetch a module or modules from svk repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with svk.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['svk']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-#           if user overrides local path, use it.
-            return parm["localpath"]
-
-        if not "module" in parm:
-            raise MissingParameterError("svk method needs a 'module' parameter")
-        else:
-            module = parm["module"]
-        if 'rev' in parm:
-            revision = parm['rev']
-        else:
-            revision = ""
-
-        date = Fetch.getSRCDate(d)
-
-        return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
-            if not "module" in parm:
-                raise MissingParameterError("svk method needs a 'module' parameter")
-            else:
-                module = parm["module"]
-
-            dlfile = self.localpath(loc, localdata)
-            dldir = data.getVar('DL_DIR', localdata, 1)
-
-#           setup svk options
-            options = []
-            if 'rev' in parm:
-                revision = parm['rev']
-            else:
-                revision = ""
-
-            date = Fetch.getSRCDate(d)
-            tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
-            data.setVar('TARFILES', dlfile, localdata)
-            data.setVar('TARFN', tarfn, localdata)
-
-            if Fetch.check_for_tarball(d, tarfn, dldir, date):
-                continue
-
-            olddir = os.path.abspath(os.getcwd())
-            os.chdir(data.expand(dldir, localdata))
-
-            svkroot = host + path
-
-            data.setVar('SVKROOT', svkroot, localdata)
-            data.setVar('SVKCOOPTS', " ".join(options), localdata)
-            data.setVar('SVKMODULE', module, localdata)
-            svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
-
-            if revision:
-                svkcmd = "svk co -r %s/%s" % (revision, svkroot, module)
-
-#           create temp directory
-            bb.debug(2, "Fetch: creating temporary directory")
-            bb.mkdirhier(data.expand('${WORKDIR}', localdata))
-            data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
-            tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
-            tmpfile = tmppipe.readline().strip()
-            if not tmpfile:
-                bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
-                raise FetchError(module)
-
-#           check out sources there
-            os.chdir(tmpfile)
-            bb.note("Fetch " + loc)
-            bb.debug(1, "Running %s" % svkcmd)
-            myret = os.system(svkcmd)
-            if myret != 0:
-                try:
-                    os.rmdir(tmpfile)
-                except OSError:
-                    pass
-                raise FetchError(module)
-
-            os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-#           tar them up to a defined filename
-            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
-            if myret != 0:
-                try:
-                    os.unlink(tarfn)
-                except OSError:
-                    pass
-#           cleanup
-            os.system('rm -rf %s' % tmpfile)
-            os.chdir(olddir)
-            del localdata
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['svk']
+
+    def localpath(self, url, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError("svk method needs a 'module' parameter")
+        else:
+            ud.module = ud.parm["module"]
+
+        ud.revision = ""
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+
+        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def forcefetch(self, url, ud, d):
+        if (ud.date == "now"):
+            return True
+        return False
+
+    def go(self, loc, ud, d):
+        """Fetch urls"""
+
+        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+            return
+
+        svkroot = ud.host + ud.path
+
+        svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module)
+
+        if ud.revision:
+            svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module)
+
+        # create temp directory
+        localdata = data.createCopy(d)
+        data.update_data(localdata)
+        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
+        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
+        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
+        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
+        tmpfile = tmppipe.readline().strip()
+        if not tmpfile:
+            bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
+            raise FetchError(ud.module)
+
+        # check out sources there
+        os.chdir(tmpfile)
+        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
+        myret = os.system(svkcmd)
+        if myret != 0:
+            try:
+                os.rmdir(tmpfile)
+            except OSError:
+                pass
+            raise FetchError(ud.module)
+
+        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
+        # tar them up to a defined filename
+        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+        if myret != 0:
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise FetchError(ud.module)
+        # cleanup
+        os.system('rm -rf %s' % tmpfile)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
index d1a959371b..b95de2a79b 100644
--- a/bitbake/lib/bb/fetch/svn.py
+++ b/bitbake/lib/bb/fetch/svn.py
@@ -26,6 +26,7 @@ Based on functions from the base bb module, Copyright 2003 Holger Schurig
 """
 
 import os, re
+import sys
 import bb
 from bb import data
 from bb.fetch import Fetch
@@ -34,136 +35,98 @@ from bb.fetch import MissingParameterError
 
 class Svn(Fetch):
     """Class to fetch a module or modules from svn repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with svn.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['svn']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-#           if user overrides local path, use it.
-            return parm["localpath"]
-
-        if not "module" in parm:
-            raise MissingParameterError("svn method needs a 'module' parameter")
-        else:
-            module = parm["module"]
-        if 'rev' in parm:
-            revision = parm['rev']
-        else:
-            revision = ""
-
-        date = Fetch.getSRCDate(d)
-
-        return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/', '.'), revision, date), d))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
-            if not "module" in parm:
-                raise MissingParameterError("svn method needs a 'module' parameter")
-            else:
-                module = parm["module"]
-
-            dlfile = self.localpath(loc, localdata)
-            dldir = data.getVar('DL_DIR', localdata, 1)
-#           if local path contains the svn
-#           module, consider the dir above it to be the
-#           download directory
-#           pos = dlfile.find(module)
-#           if pos:
-#               dldir = dlfile[:pos]
-#           else:
-#               dldir = os.path.dirname(dlfile)
-
-#           setup svn options
-            options = []
-            if 'rev' in parm:
-                revision = parm['rev']
-            else:
-                revision = ""
-
-            date = Fetch.getSRCDate(d)
-
-            if "proto" in parm:
-                proto = parm["proto"]
-            else:
-                proto = "svn"
-
-            svn_rsh = None
-            if proto == "svn+ssh" and "rsh" in parm:
-                svn_rsh = parm["rsh"]
-
-            tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
-            data.setVar('TARFILES', dlfile, localdata)
-            data.setVar('TARFN', tarfn, localdata)
-
-            if Fetch.check_for_tarball(d, tarfn, dldir, date):
-                continue
-
-            olddir = os.path.abspath(os.getcwd())
-            os.chdir(data.expand(dldir, localdata))
-
-            svnroot = host + path
-
-            data.setVar('SVNROOT', svnroot, localdata)
-            data.setVar('SVNCOOPTS', " ".join(options), localdata)
-            data.setVar('SVNMODULE', module, localdata)
-            svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
-            svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)
-
-            if revision:
-                svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
-            elif date == "now":
-                svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)
-
-            if svn_rsh:
-                svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
-#           create temp directory
-            bb.debug(2, "Fetch: creating temporary directory")
-            bb.mkdirhier(data.expand('${WORKDIR}', localdata))
-            data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
-            tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
-            tmpfile = tmppipe.readline().strip()
-            if not tmpfile:
-                bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
-                raise FetchError(module)
-
-#           check out sources there
-            os.chdir(tmpfile)
-            bb.note("Fetch " + loc)
-            bb.debug(1, "Running %s" % svncmd)
-            myret = os.system(svncmd)
-            if myret != 0:
-                try:
-                    os.rmdir(tmpfile)
-                except OSError:
-                    pass
-                raise FetchError(module)
-
-            os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-#           tar them up to a defined filename
-            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
-            if myret != 0:
-                try:
-                    os.unlink(tarfn)
-                except OSError:
-                    pass
-#           cleanup
-            os.system('rm -rf %s' % tmpfile)
-            os.chdir(olddir)
-            del localdata
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with svn.
+        """
+        return ud.type in ['svn']
+
+    def localpath(self, url, ud, d):
+        if not "module" in ud.parm:
+            raise MissingParameterError("svn method needs a 'module' parameter")
+        else:
+            ud.module = ud.parm["module"]
+
+        ud.revision = ""
+        if 'rev' in ud.parm:
+            ud.revision = ud.parm['rev']
+
+        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def forcefetch(self, url, ud, d):
+        if (ud.date == "now"):
+            return True
+        return False
+
+    def go(self, loc, ud, d):
+        """Fetch url"""
+
+        # try to use the tarball stash
+        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
+            return
+
+        proto = "svn"
+        if "proto" in ud.parm:
+            proto = ud.parm["proto"]
+
+        svn_rsh = None
+        if proto == "svn+ssh" and "rsh" in ud.parm:
+            svn_rsh = ud.parm["rsh"]
+
+        svnroot = ud.host + ud.path
+
+        # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
+        options = []
+        if ud.revision:
+            options.append("-r %s" % ud.revision)
+        elif ud.date != "now":
+            options.append("-r {%s}" % ud.date)
+
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+
+        data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata)
+        data.setVar('SVNCOOPTS', " ".join(options), localdata)
+        data.setVar('SVNMODULE', ud.module, localdata)
+        svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
+        svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)
+
+        if svn_rsh:
+            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
+            svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)
+
+        pkg = data.expand('${PN}', d)
+        pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
+        moddir = os.path.join(pkgdir, ud.module)
+        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")
+
+        if os.access(os.path.join(moddir, '.svn'), os.R_OK):
+            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
+            # update sources there
+            os.chdir(moddir)
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
+            myret = os.system(svnupcmd)
+        else:
+            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
+            # check out sources there
+            bb.mkdirhier(pkgdir)
+            os.chdir(pkgdir)
+            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
+            myret = os.system(svncmd)
+
+        if myret != 0:
+            raise FetchError(ud.module)
+
+        os.chdir(pkgdir)
+        # tar them up to a defined filename
+        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
+        if myret != 0:
+            try:
+                os.unlink(ud.localpath)
+            except OSError:
+                pass
+            raise FetchError(ud.module)
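The revision/date handling added to the svn fetcher reduces to the following; the values are illustrative only, not from the commit:

    revision, date = "", "20061116"         # from the rev= parameter and SRCDATE
    options = []
    if revision:
        options.append("-r %s" % revision)  # an explicit revision wins
    elif date != "now":
        options.append("-r {%s}" % date)    # otherwise pin to SRCDATE; "now" adds nothing
    # " ".join(options) -> "-r {20061116}", passed to the checkout via SVNCOOPTS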
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index e47a8859be..9c9c1675a1 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -30,138 +30,70 @@ import bb
 from bb import data
 from bb.fetch import Fetch
 from bb.fetch import FetchError
-from bb.fetch import MD5SumError
 from bb.fetch import uri_replace
 
 class Wget(Fetch):
     """Class to fetch urls via 'wget'"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched using wget.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['http','https','ftp']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-#       strip off parameters
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-#           if user overrides local path, use it.
-            return parm["localpath"]
-        url = bb.encodeurl([type, host, path, user, pswd, {}])
-
-        return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-
-        def md5_sum(parm, d):
-            """
-            Return the MD5SUM associated with the to be downloaded
-            file.
-            It can return None if no md5sum is associated
-            """
-            try:
-                return parm['md5sum']
-            except:
-                return None
-
-        def verify_md5sum(wanted_sum, got_sum):
-            """
-            Verify the md5sum we wanted with the one we got
-            """
-            if not wanted_sum:
-                return True
-
-            return wanted_sum == got_sum
-
-        def fetch_uri(uri, basename, dl, md5, parm, d):
-            # the MD5 sum we want to verify
-            wanted_md5sum = md5_sum(parm, d)
-            if os.path.exists(dl):
-#               file exists, but we didnt complete it.. trying again..
-                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
-            else:
-                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
-
-            bb.note("fetch " + uri)
-            fetchcmd = fetchcmd.replace("${URI}", uri)
-            fetchcmd = fetchcmd.replace("${FILE}", basename)
-            bb.debug(2, "executing " + fetchcmd)
-            ret = os.system(fetchcmd)
-            if ret != 0:
-                return False
-
-            # check if sourceforge did send us to the mirror page
-            dl_dir = data.getVar("DL_DIR", d, True)
-            if not os.path.exists(dl):
-                os.system("rm %s*" % dl) # FIXME shell quote it
-                bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
-                return False
-
-#           supposedly complete.. write out md5sum
-            if bb.which(data.getVar('PATH', d), 'md5sum'):
-                try:
-                    md5pipe = os.popen('md5sum ' + dl)
-                    md5data = (md5pipe.readline().split() or [ "" ])[0]
-                    md5pipe.close()
-                except OSError:
-                    md5data = ""
-
-            # verify the md5sum
-            if not verify_md5sum(wanted_md5sum, md5data):
-                raise MD5SumError(uri)
-
-            md5out = file(md5, 'w')
-            md5out.write(md5data)
-            md5out.close()
-            return True
-
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for uri in urls:
-            completed = 0
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
-            basename = os.path.basename(path)
-            dl = self.localpath(uri, d)
-            dl = data.expand(dl, localdata)
-            md5 = dl + '.md5'
-
-            if os.path.exists(md5):
-#               complete, nothing to see here..
-                continue
-
-            premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
-            for (find, replace) in premirrors:
-                newuri = uri_replace(uri, find, replace, d)
-                if newuri != uri:
-                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
-                        completed = 1
-                        break
-
-            if completed:
-                continue
-
-            if fetch_uri(uri, basename, dl, md5, parm, localdata):
-                continue
-
-#           try mirrors
-            mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
-            for (find, replace) in mirrors:
-                newuri = uri_replace(uri, find, replace, d)
-                if newuri != uri:
-                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
-                        completed = 1
-                        break
-
-            if not completed:
-                raise FetchError(uri)
-
-            del localdata
+    def supports(self, url, ud, d):
+        """
+        Check to see if a given url can be fetched with cvs.
+        """
+        return ud.type in ['http','https','ftp']
+
+    def localpath(self, url, ud, d):
+
+        url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
+        ud.basename = os.path.basename(ud.path)
+        ud.localfile = data.expand(os.path.basename(url), d)
+
+        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
+
+    def go(self, uri, ud, d):
+        """Fetch urls"""
+
+        def fetch_uri(uri, ud, d):
+            if os.path.exists(ud.localpath):
+                # file exists, but we didnt complete it.. trying again..
+                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
+            else:
+                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
+
+            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
+            fetchcmd = fetchcmd.replace("${URI}", uri)
+            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
+            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
+            ret = os.system(fetchcmd)
+            if ret != 0:
+                return False
+
+            # check if sourceforge did send us to the mirror page
+            if not os.path.exists(ud.localpath):
+                os.system("rm %s*" % ud.localpath) # FIXME shell quote it
+                bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net send us to the mirror on %s" % ud.basename)
+                return False
+
+            return True
+
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+
+        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
+        for (find, replace) in premirrors:
+            newuri = uri_replace(uri, find, replace, d)
+            if newuri != uri:
+                if fetch_uri(newuri, ud, localdata):
+                    return
+
+        if fetch_uri(uri, ud, localdata):
+            return
+
+        # try mirrors
+        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
+        for (find, replace) in mirrors:
+            newuri = uri_replace(uri, find, replace, d)
+            if newuri != uri:
+                if fetch_uri(newuri, ud, localdata):
+                    return
+
+        raise FetchError(uri)
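For reference, PREMIRRORS/MIRRORS are newline-separated "find replace" pairs which the code above feeds through uri_replace(); a sketch with made-up mirror URLs:

    premirrors_var = "ftp://.*/.*  http://mirror.example.org/sources/\nhttp://.*/.*  http://mirror.example.org/sources/"
    premirrors = [ i.split() for i in premirrors_var.split('\n') if i ]
    # -> [['ftp://.*/.*', 'http://mirror.example.org/sources/'],
    #     ['http://.*/.*', 'http://mirror.example.org/sources/']]
    # Each rewritten URI that differs from the original is tried first,
    # then the original URI, then the MIRRORS list; if all fail, FetchError is raised.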