author     Richard Purdie <richard@openedhand.com>  2006-02-10 11:43:38 +0000
committer  Richard Purdie <richard@openedhand.com>  2006-02-10 11:43:38 +0000
commit     ce241d7e6d8e31411867b5f2127b02c776b39d8a
tree       ebc7996fff58d59f1b55cfb550c29f9708e59586
parent     b236090192c018bf8efe8c72019d3d1567b6d158
download   poky-ce241d7e6d8e31411867b5f2127b02c776b39d8a.tar.gz
Update to 1.3.3 release of bitbake
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@269 311d38ba-8fff-0310-9ca6-ca027cbcb966
-rw-r--r--  bitbake/ChangeLog           |  18
-rwxr-xr-x  bitbake/bin/bitbake         |   5
-rw-r--r--  bitbake/lib/bb/__init__.py  |   2
-rw-r--r--  bitbake/lib/bb/fetch.py     | 656
4 files changed, 15 insertions, 666 deletions
diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
index e8a91c1701..c05ff96ab9 100644
--- a/bitbake/ChangeLog
+++ b/bitbake/ChangeLog
@@ -1,4 +1,4 @@
-Changes in BitBake 1.3.?:
+Changes in BitBake 1.3.3:
 - Create a new Fetcher module to ease the
   development of new Fetchers.
   Issue #438 fixed by rpurdie@openedhand.com
@@ -7,13 +7,19 @@ Changes in BitBake 1.3.?:
   Issue #555 fixed by chris@openedhand.com
 - Expand PREFERRED_PROVIDER properly
   Issue #436 fixed by rprudie@openedhand.com
-- Typo fix for Issue #531 Philipp Zabel for the
+- Typo fix for Issue #531 by Philipp Zabel for the
   BitBake Shell
 - Introduce a new special variable SRCDATE as
-  a generic naming to take over CVSDATE.
-- Introduce a new keyword 'required' In contrast
-  to include parsing will fail if a to be included
-  can not be found.
+  a generic naming to replace CVSDATE.
+- Introduce a new keyword 'required'. In contrast
+  to 'include' parsing will fail if a to be included
+  file can not be found.
+- Remove hardcoding of the STAMP directory. Patch
+  courtsey pHilipp Zabel
+- Track the RDEPENDS of each package (rpurdie@openedhand.com)
+- Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g
+  this is used by the OpenEmbedded Meta Packages.
+  (rpurdie@openedhand.com).
 
 Changes in BitBake 1.3.2:
 - reintegration of make.py into BitBake
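As a minimal sketch of the parsing behaviour the new 'required' keyword entry above describes (illustrative Python only, not BitBake's actual parser; the handle_inclusion helper is invented for this example): an ordinary include silently skips a file that cannot be found, while the new keyword turns that into a hard parse failure.

import os

class ParseError(Exception):
    """Raised when a required inclusion cannot be satisfied."""

def handle_inclusion(filename, required=False):
    # Hypothetical helper, not BitBake code: models the behaviour the
    # ChangeLog entry describes.
    if not os.path.exists(filename):
        if required:
            # the new keyword: a missing file aborts parsing
            raise ParseError("could not find required file %s" % filename)
        # plain 'include': a missing file is silently skipped
        return
    # ... the file would be parsed here ...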
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 09417f6eca..b1fd76a4f1 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -31,7 +31,7 @@ import itertools, optparse
 parsespin = itertools.cycle( r'|/-\\' )
 bbdebug = 0
 
-__version__ = "1.3.2"
+__version__ = "1.3.3"
 
 #============================================================================#
 # BBParsingStatus
@@ -461,8 +461,7 @@ class BBCooker:
             pn = bb.data.getVar('PN', the_data, 1)
             pv = bb.data.getVar('PV', the_data, 1)
             pr = bb.data.getVar('PR', the_data, 1)
-            tmpdir = bb.data.getVar('TMPDIR', the_data, 1)
-            stamp = '%s/stamps/%s-%s-%s.do_populate_staging' % (tmpdir, pn, pv, pr)
+            stamp = '%s.do_populate_staging' % bb.data.getVar('STAMP', the_data, 1)
             if os.path.exists(stamp):
                 (newvers, fn) = preferred_versions[pn]
                 if not fn in eligible:
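The second hunk above implements the "Remove hardcoding of the STAMP directory" ChangeLog entry: the cooker now builds the staging stamp name from the STAMP variable instead of assembling it by hand from TMPDIR, PN, PV and PR. A rough sketch of why the two are equivalent under the conventional configuration (the default value of STAMP suggested here is an assumption, not part of this diff):

import os

def staging_stamp_old(tmpdir, pn, pv, pr):
    # pre-1.3.3 behaviour: path assembled by hand from TMPDIR/PN/PV/PR
    return '%s/stamps/%s-%s-%s.do_populate_staging' % (tmpdir, pn, pv, pr)

def staging_stamp_new(stamp):
    # 1.3.3 behaviour: STAMP already carries the directory and base name,
    # e.g. something like "<TMPDIR>/stamps/<PN>-<PV>-<PR>" (assumed default)
    return '%s.do_populate_staging' % stamp

def staging_done(stamp):
    # the cooker only checks whether the stamp file exists
    return os.path.exists(staging_stamp_new(stamp))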
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index f27f53b39d..dabe978bf5 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -23,7 +23,7 @@ this program; if not, write to the Free Software Foundation, Inc., 59 Temple
 Place, Suite 330, Boston, MA 02111-1307 USA.
 """
 
-__version__ = "1.3.2.1"
+__version__ = "1.3.3.0"
 
 __all__ = [
 
diff --git a/bitbake/lib/bb/fetch.py b/bitbake/lib/bb/fetch.py
deleted file mode 100644
index 230dd21e2e..0000000000
--- a/bitbake/lib/bb/fetch.py
+++ /dev/null
@@ -1,656 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-Copyright (C) 2003, 2004 Chris Larson
-
-This program is free software; you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free Software
-Foundation; either version 2 of the License, or (at your option) any later
-version.
-
-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with
-this program; if not, write to the Free Software Foundation, Inc., 59 Temple
-Place, Suite 330, Boston, MA 02111-1307 USA.
-
-Based on functions from the base bb module, Copyright 2003 Holger Schurig
-"""
-
-import os, re
-import bb
-from bb import data
-
-class FetchError(Exception):
-    """Exception raised when a download fails"""
-
-class NoMethodError(Exception):
-    """Exception raised when there is no method to obtain a supplied url or set of urls"""
-
-class MissingParameterError(Exception):
-    """Exception raised when a fetch method is missing a critical parameter in the url"""
-
-class MD5SumError(Exception):
-    """Exception raised when a MD5SUM of a file does not match the expected one"""
-
-def uri_replace(uri, uri_find, uri_replace, d):
-    # bb.note("uri_replace: operating on %s" % uri)
-    if not uri or not uri_find or not uri_replace:
-        bb.debug(1, "uri_replace: passed an undefined value, not replacing")
-    uri_decoded = list(bb.decodeurl(uri))
-    uri_find_decoded = list(bb.decodeurl(uri_find))
-    uri_replace_decoded = list(bb.decodeurl(uri_replace))
-    result_decoded = ['','','','','',{}]
-    for i in uri_find_decoded:
-        loc = uri_find_decoded.index(i)
-        result_decoded[loc] = uri_decoded[loc]
-        import types
-        if type(i) == types.StringType:
-            import re
-            if (re.match(i, uri_decoded[loc])):
-                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
-                if uri_find_decoded.index(i) == 2:
-                    if d:
-                        localfn = bb.fetch.localpath(uri, d)
-                        if localfn:
-                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
-                # bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
-            else:
-                # bb.note("uri_replace: no match")
-                return uri
-        # else:
-        #     for j in i.keys():
-        #         FIXME: apply replacements against options
-    return bb.encodeurl(result_decoded)
-
-methods = []
-
-def init(urls = [], d = None):
-    if d == None:
-        bb.debug(2,"BUG init called with None as data object!!!")
-        return
-
-    for m in methods:
-        m.urls = []
-
-    for u in urls:
-        for m in methods:
-            m.data = d
-            if m.supports(u, d):
-                m.urls.append(u)
-
-def go(d):
-    """Fetch all urls"""
-    for m in methods:
-        if m.urls:
-            m.go(d)
-
-def localpaths(d):
-    """Return a list of the local filenames, assuming successful fetch"""
-    local = []
-    for m in methods:
-        for u in m.urls:
-            local.append(m.localpath(u, d))
-    return local
-
-def localpath(url, d):
-    for m in methods:
-        if m.supports(url, d):
-            return m.localpath(url, d)
-    return url
-
-class Fetch(object):
-    """Base class for 'fetch'ing data"""
-
-    def __init__(self, urls = []):
-        self.urls = []
-        for url in urls:
-            if self.supports(bb.decodeurl(url), d) is 1:
-                self.urls.append(url)
-
-    def supports(url, d):
-        """Check to see if this fetch class supports a given url.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        return 0
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        """Return the local filename of a given url assuming a successful fetch.
-        """
-        return url
-    localpath = staticmethod(localpath)
-
-    def setUrls(self, urls):
-        self.__urls = urls
-
-    def getUrls(self):
-        return self.__urls
-
-    urls = property(getUrls, setUrls, None, "Urls property")
-
-    def setData(self, data):
-        self.__data = data
-
-    def getData(self):
-        return self.__data
-
-    data = property(getData, setData, None, "Data property")
-
-    def go(self, urls = []):
-        """Fetch urls"""
-        raise NoMethodError("Missing implementation for url")
-
-class Wget(Fetch):
-    """Class to fetch urls via 'wget'"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched using wget.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['http','https','ftp']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        # strip off parameters
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-            # if user overrides local path, use it.
-            return parm["localpath"]
-        url = bb.encodeurl([type, host, path, user, pswd, {}])
-
-        return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-
-        def md5_sum(basename, data):
-            """
-            Fast and incomplete OVERRIDE implementation for MD5SUM handling
-            MD5SUM_basename = "SUM" and fallback to MD5SUM_basename
-            """
-            var = "MD5SUM_%s" % basename
-            return getVar(var, data) or get("MD5SUM",data)
-
-
-        def fetch_uri(uri, basename, dl, md5, parm, d):
-            if os.path.exists(dl):
-                # file exists, but we didnt complete it.. trying again..
-                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
-            else:
-                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
-
-            bb.note("fetch " + uri)
-            fetchcmd = fetchcmd.replace("${URI}", uri)
-            fetchcmd = fetchcmd.replace("${FILE}", basename)
-            bb.debug(2, "executing " + fetchcmd)
-            ret = os.system(fetchcmd)
-            if ret != 0:
-                return False
-
-            # check if sourceforge did send us to the mirror page
-            dl_dir = data.getVar("DL_DIR", d, True)
-            if not os.path.exists(dl):
-                os.system("rm %s*" % dl) # FIXME shell quote it
-                bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
-                return False
-
-            # supposedly complete.. write out md5sum
-            if bb.which(data.getVar('PATH', d), 'md5sum'):
-                try:
-                    md5pipe = os.popen('md5sum ' + dl)
-                    md5data = (md5pipe.readline().split() or [ "" ])[0]
-                    md5pipe.close()
-                except OSError:
-                    md5data = ""
-                md5out = file(md5, 'w')
-                md5out.write(md5data)
-                md5out.close()
-            else:
-                md5out = file(md5, 'w')
-                md5out.write("")
-                md5out.close()
-            return True
-
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for uri in urls:
-            completed = 0
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
-            basename = os.path.basename(path)
-            dl = self.localpath(uri, d)
-            dl = data.expand(dl, localdata)
-            md5 = dl + '.md5'
-
-            if os.path.exists(md5):
-                # complete, nothing to see here..
-                continue
-
-            premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
-            for (find, replace) in premirrors:
-                newuri = uri_replace(uri, find, replace, d)
-                if newuri != uri:
-                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
-                        completed = 1
-                        break
-
-            if completed:
-                continue
-
-            if fetch_uri(uri, basename, dl, md5, parm, localdata):
-                continue
-
-            # try mirrors
-            mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
-            for (find, replace) in mirrors:
-                newuri = uri_replace(uri, find, replace, d)
-                if newuri != uri:
-                    if fetch_uri(newuri, basename, dl, md5, parm, localdata):
-                        completed = 1
-                        break
-
-            if not completed:
-                raise FetchError(uri)
-
-        del localdata
-
-
-methods.append(Wget())
-
-class Cvs(Fetch):
-    """Class to fetch a module or modules from cvs repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with cvs.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['cvs', 'pserver']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-            # if user overrides local path, use it.
-            return parm["localpath"]
-
-        if not "module" in parm:
-            raise MissingParameterError("cvs method needs a 'module' parameter")
-        else:
-            module = parm["module"]
-        if 'tag' in parm:
-            tag = parm['tag']
-        else:
-            tag = ""
-        if 'date' in parm:
-            date = parm['date']
-        else:
-            if not tag:
-                date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-            else:
-                date = ""
-
-        return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
-            if not "module" in parm:
-                raise MissingParameterError("cvs method needs a 'module' parameter")
-            else:
-                module = parm["module"]
-
-            dlfile = self.localpath(loc, localdata)
-            dldir = data.getVar('DL_DIR', localdata, 1)
-            # if local path contains the cvs
-            # module, consider the dir above it to be the
-            # download directory
-            # pos = dlfile.find(module)
-            # if pos:
-            #     dldir = dlfile[:pos]
-            # else:
-            #     dldir = os.path.dirname(dlfile)
-
-            # setup cvs options
-            options = []
-            if 'tag' in parm:
-                tag = parm['tag']
-            else:
-                tag = ""
-
-            if 'date' in parm:
-                date = parm['date']
-            else:
-                if not tag:
-                    date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-                else:
-                    date = ""
-
-            if "method" in parm:
-                method = parm["method"]
-            else:
-                method = "pserver"
-
-            if "localdir" in parm:
-                localdir = parm["localdir"]
-            else:
-                localdir = module
-
-            cvs_rsh = None
-            if method == "ext":
-                if "rsh" in parm:
-                    cvs_rsh = parm["rsh"]
-
-            tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
-            data.setVar('TARFILES', dlfile, localdata)
-            data.setVar('TARFN', tarfn, localdata)
-
-            dl = os.path.join(dldir, tarfn)
-            if os.access(dl, os.R_OK):
-                bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
-                continue
-
-            pn = data.getVar('PN', d, 1)
-            cvs_tarball_stash = None
-            if pn:
-                cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH_%s' % pn, d, 1)
-            if cvs_tarball_stash == None:
-                cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH', d, 1)
-            if cvs_tarball_stash:
-                fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1)
-                uri = cvs_tarball_stash + tarfn
-                bb.note("fetch " + uri)
-                fetchcmd = fetchcmd.replace("${URI}", uri)
-                ret = os.system(fetchcmd)
-                if ret == 0:
-                    bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
-                    continue
-
-            if date:
-                options.append("-D %s" % date)
-            if tag:
-                options.append("-r %s" % tag)
-
-            olddir = os.path.abspath(os.getcwd())
-            os.chdir(data.expand(dldir, localdata))
-
-            # setup cvsroot
-            if method == "dir":
-                cvsroot = path
-            else:
-                cvsroot = ":" + method + ":" + user
-                if pswd:
-                    cvsroot += ":" + pswd
-                cvsroot += "@" + host + ":" + path
-
-            data.setVar('CVSROOT', cvsroot, localdata)
-            data.setVar('CVSCOOPTS', " ".join(options), localdata)
-            data.setVar('CVSMODULE', module, localdata)
-            cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
-            cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
-            if cvs_rsh:
-                cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
-                cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
-            # create module directory
-            bb.debug(2, "Fetch: checking for module directory")
-            pkg=data.expand('${PN}', d)
-            pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
-            moddir=os.path.join(pkgdir,localdir)
-            if os.access(os.path.join(moddir,'CVS'), os.R_OK):
-                bb.note("Update " + loc)
-                # update sources there
-                os.chdir(moddir)
-                myret = os.system(cvsupdatecmd)
-            else:
-                bb.note("Fetch " + loc)
-                # check out sources there
-                bb.mkdirhier(pkgdir)
-                os.chdir(pkgdir)
-                bb.debug(1, "Running %s" % cvscmd)
-                myret = os.system(cvscmd)
-
-            if myret != 0:
-                try:
-                    os.rmdir(moddir)
-                except OSError:
-                    pass
-                raise FetchError(module)
-
-            os.chdir(moddir)
-            os.chdir('..')
-            # tar them up to a defined filename
-            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
-            if myret != 0:
-                try:
-                    os.unlink(tarfn)
-                except OSError:
-                    pass
-        os.chdir(olddir)
-        del localdata
-
-methods.append(Cvs())
-
-class Bk(Fetch):
-    def supports(url, d):
-        """Check to see if a given url can be fetched via bitkeeper.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['bk']
-    supports = staticmethod(supports)
-
-methods.append(Bk())
-
-class Local(Fetch):
-    def supports(url, d):
-        """Check to see if a given url can be fetched in the local filesystem.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['file','patch']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        """Return the local filename of a given url assuming a successful fetch.
-        """
-        path = url.split("://")[1]
-        newpath = path
-        if path[0] != "/":
-            filespath = data.getVar('FILESPATH', d, 1)
-            if filespath:
-                newpath = bb.which(filespath, path)
-            if not newpath:
-                filesdir = data.getVar('FILESDIR', d, 1)
-                if filesdir:
-                    newpath = os.path.join(filesdir, path)
-        return newpath
-    localpath = staticmethod(localpath)
-
-    def go(self, urls = []):
-        """Fetch urls (no-op for Local method)"""
-        # no need to fetch local files, we'll deal with them in place.
-        return 1
-
-methods.append(Local())
-
-class Svn(Fetch):
-    """Class to fetch a module or modules from svn repositories"""
-    def supports(url, d):
-        """Check to see if a given url can be fetched with svn.
-        Expects supplied url in list form, as outputted by bb.decodeurl().
-        """
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        return type in ['svn']
-    supports = staticmethod(supports)
-
-    def localpath(url, d):
-        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
-        if "localpath" in parm:
-            # if user overrides local path, use it.
-            return parm["localpath"]
-
-        if not "module" in parm:
-            raise MissingParameterError("svn method needs a 'module' parameter")
-        else:
-            module = parm["module"]
-        if 'rev' in parm:
-            revision = parm['rev']
-        else:
-            revision = ""
-
-        date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-
-        return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, path.replace('/','.'), revision, date), d))
-    localpath = staticmethod(localpath)
-
-    def go(self, d, urls = []):
-        """Fetch urls"""
-        if not urls:
-            urls = self.urls
-
-        localdata = data.createCopy(d)
-        data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
-        data.update_data(localdata)
-
-        for loc in urls:
-            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
-            if not "module" in parm:
-                raise MissingParameterError("svn method needs a 'module' parameter")
-            else:
-                module = parm["module"]
-
-            dlfile = self.localpath(loc, localdata)
-            dldir = data.getVar('DL_DIR', localdata, 1)
-            # if local path contains the svn
-            # module, consider the dir above it to be the
-            # download directory
-            # pos = dlfile.find(module)
-            # if pos:
-            #     dldir = dlfile[:pos]
-            # else:
-            #     dldir = os.path.dirname(dlfile)
-
-            # setup svn options
-            options = []
-            if 'rev' in parm:
-                revision = parm['rev']
-            else:
-                revision = ""
-
-            date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-
-            if "method" in parm:
-                method = parm["method"]
-            else:
-                method = "pserver"
-
-            if "proto" in parm:
-                proto = parm["proto"]
-            else:
-                proto = "svn"
-
-            svn_rsh = None
-            if method == "ext":
-                if "rsh" in parm:
-                    svn_rsh = parm["rsh"]
-
-            tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
-            data.setVar('TARFILES', dlfile, localdata)
-            data.setVar('TARFN', tarfn, localdata)
-
-            dl = os.path.join(dldir, tarfn)
-            if os.access(dl, os.R_OK):
-                bb.debug(1, "%s already exists, skipping svn checkout." % tarfn)
-                continue
-
-            svn_tarball_stash = data.getVar('CVS_TARBALL_STASH', d, 1)
-            if svn_tarball_stash:
-                fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1)
-                uri = svn_tarball_stash + tarfn
-                bb.note("fetch " + uri)
-                fetchcmd = fetchcmd.replace("${URI}", uri)
-                ret = os.system(fetchcmd)
-                if ret == 0:
-                    bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
-                    continue
-
-            olddir = os.path.abspath(os.getcwd())
-            os.chdir(data.expand(dldir, localdata))
-
-            # setup svnroot
-            # svnroot = ":" + method + ":" + user
-            # if pswd:
-            #     svnroot += ":" + pswd
-            svnroot = host + path
-
-            data.setVar('SVNROOT', svnroot, localdata)
-            data.setVar('SVNCOOPTS', " ".join(options), localdata)
-            data.setVar('SVNMODULE', module, localdata)
-            svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
-            svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)
-
-            if revision:
-                svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
-            if svn_rsh:
-                svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
-            # create temp directory
-            bb.debug(2, "Fetch: creating temporary directory")
-            bb.mkdirhier(data.expand('${WORKDIR}', localdata))
-            data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
-            tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
-            tmpfile = tmppipe.readline().strip()
-            if not tmpfile:
-                bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
-                raise FetchError(module)
-
-            # check out sources there
-            os.chdir(tmpfile)
-            bb.note("Fetch " + loc)
-            bb.debug(1, "Running %s" % svncmd)
-            myret = os.system(svncmd)
-            if myret != 0:
-                try:
-                    os.rmdir(tmpfile)
-                except OSError:
-                    pass
-                raise FetchError(module)
-
-            os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
-            # tar them up to a defined filename
-            myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
-            if myret != 0:
-                try:
-                    os.unlink(tarfn)
-                except OSError:
-                    pass
-            # cleanup
-            os.system('rm -rf %s' % tmpfile)
-            os.chdir(olddir)
-        del localdata
-
-methods.append(Svn())
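For context on the "new Fetcher module" ChangeLog entry, the file removed above registered each fetch implementation by subclassing Fetch and appending an instance to the module-level methods list. A hedged sketch of that pattern, assuming it would live inside the old bb/fetch.py namespace where Fetch, methods, NoMethodError, bb and data already exist; the Hg class and 'hg' scheme are invented purely for illustration:

import os

class Hg(Fetch):
    """Fetcher for a hypothetical 'hg' URL scheme (illustration only)"""
    def supports(url, d):
        """Check to see if a given url can be fetched with this method.
        Expects supplied url in list form, as outputted by bb.decodeurl().
        """
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
        return type in ['hg']
    supports = staticmethod(supports)

    def localpath(url, d):
        """Tarball this fetcher would produce under DL_DIR."""
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
        return os.path.join(data.getVar("DL_DIR", d, 1),
                            '%s_%s.tar.gz' % (host, path.replace('/', '.')))
    localpath = staticmethod(localpath)

    def go(self, d, urls = []):
        """Fetch urls (left unimplemented in this sketch)"""
        raise NoMethodError("hg fetching is not implemented in this sketch")

methods.append(Hg())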