author | Richard Purdie <richard@openedhand.com> | 2008-09-30 15:08:33 +0000
---|---|---
committer | Richard Purdie <richard@openedhand.com> | 2008-09-30 15:08:33 +0000
commit | c30eddb243e7e65f67f656e62848a033cf6f2e5c (patch) |
tree | 110dd95788b76f55d31cb8d30aac2de8400b6f4a | /bitbake-dev/lib/bb/fetch
parent | 5ef0510474004eeb2ae8a99b64e2febb1920e077 (diff) |
download | poky-c30eddb243e7e65f67f656e62848a033cf6f2e5c.tar.gz |
Add bitbake-dev to allow ease of testing and development of bitbake trunk
git-svn-id: https://svn.o-hand.com/repos/poky/trunk@5337 311d38ba-8fff-0310-9ca6-ca027cbcb966
Diffstat (limited to 'bitbake-dev/lib/bb/fetch')
-rw-r--r-- | bitbake-dev/lib/bb/fetch/__init__.py | 556
-rw-r--r-- | bitbake-dev/lib/bb/fetch/bzr.py | 154
-rw-r--r-- | bitbake-dev/lib/bb/fetch/cvs.py | 178
-rw-r--r-- | bitbake-dev/lib/bb/fetch/git.py | 142
-rw-r--r-- | bitbake-dev/lib/bb/fetch/hg.py | 141
-rw-r--r-- | bitbake-dev/lib/bb/fetch/local.py | 72
-rw-r--r-- | bitbake-dev/lib/bb/fetch/perforce.py | 213
-rw-r--r-- | bitbake-dev/lib/bb/fetch/ssh.py | 120
-rw-r--r-- | bitbake-dev/lib/bb/fetch/svk.py | 109
-rw-r--r-- | bitbake-dev/lib/bb/fetch/svn.py | 204
-rw-r--r-- | bitbake-dev/lib/bb/fetch/wget.py | 105
11 files changed, 1994 insertions, 0 deletions
diff --git a/bitbake-dev/lib/bb/fetch/__init__.py b/bitbake-dev/lib/bb/fetch/__init__.py
new file mode 100644
index 0000000000..c3bea447c1
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/__init__.py
@@ -0,0 +1,556 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | """ | ||
9 | |||
10 | # Copyright (C) 2003, 2004 Chris Larson | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | # | ||
25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
26 | |||
27 | import os, re, fcntl | ||
28 | import bb | ||
29 | from bb import data | ||
30 | from bb import persist_data | ||
31 | |||
32 | try: | ||
33 | import cPickle as pickle | ||
34 | except ImportError: | ||
35 | import pickle | ||
36 | |||
37 | class FetchError(Exception): | ||
38 | """Exception raised when a download fails""" | ||
39 | |||
40 | class NoMethodError(Exception): | ||
41 | """Exception raised when there is no method to obtain a supplied url or set of urls""" | ||
42 | |||
43 | class MissingParameterError(Exception): | ||
44 | """Exception raised when a fetch method is missing a critical parameter in the url""" | ||
45 | |||
46 | class ParameterError(Exception): | ||
47 | """Exception raised when a url cannot be processed due to invalid parameters.""" | ||
48 | |||
49 | class MD5SumError(Exception): | ||
50 | """Exception raised when the MD5SUM of a file does not match the expected one""" | ||
51 | |||
52 | def uri_replace(uri, uri_find, uri_replace, d): | ||
53 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri) | ||
54 | if not uri or not uri_find or not uri_replace: | ||
55 | bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing") | ||
56 | uri_decoded = list(bb.decodeurl(uri)) | ||
57 | uri_find_decoded = list(bb.decodeurl(uri_find)) | ||
58 | uri_replace_decoded = list(bb.decodeurl(uri_replace)) | ||
59 | result_decoded = ['','','','','',{}] | ||
60 | for i in uri_find_decoded: | ||
61 | loc = uri_find_decoded.index(i) | ||
62 | result_decoded[loc] = uri_decoded[loc] | ||
63 | import types | ||
64 | if type(i) == types.StringType: | ||
65 | import re | ||
66 | if (re.match(i, uri_decoded[loc])): | ||
67 | result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) | ||
68 | if uri_find_decoded.index(i) == 2: | ||
69 | if d: | ||
70 | localfn = bb.fetch.localpath(uri, d) | ||
71 | if localfn: | ||
72 | result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) | ||
73 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc])) | ||
74 | else: | ||
75 | # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match") | ||
76 | return uri | ||
77 | # else: | ||
78 | # for j in i.keys(): | ||
79 | # FIXME: apply replacements against options | ||
80 | return bb.encodeurl(result_decoded) | ||
81 | |||
82 | methods = [] | ||
83 | urldata_cache = {} | ||
84 | |||
85 | def fetcher_init(d): | ||
86 | """ | ||
87 | Called to initialize the fetchers once the configuration data is known | ||
88 | Calls before this must not hit the cache. | ||
89 | """ | ||
90 | pd = persist_data.PersistData(d) | ||
91 | # When to drop SCM head revisions is controlled by user policy | ||
92 | srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear" | ||
93 | if srcrev_policy == "cache": | ||
94 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy) | ||
95 | elif srcrev_policy == "clear": | ||
96 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy) | ||
97 | pd.delDomain("BB_URI_HEADREVS") | ||
98 | else: | ||
99 | bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy) | ||
100 | # Make sure our domains exist | ||
101 | pd.addDomain("BB_URI_HEADREVS") | ||
102 | pd.addDomain("BB_URI_LOCALCOUNT") | ||
103 | |||
104 | # Function call order is usually: | ||
105 | # 1. init | ||
106 | # 2. go | ||
107 | # 3. localpaths | ||
108 | # localpath can be called at any time | ||
109 | |||
110 | def init(urls, d, setup = True): | ||
111 | urldata = {} | ||
112 | fn = bb.data.getVar('FILE', d, 1) | ||
113 | if fn in urldata_cache: | ||
114 | urldata = urldata_cache[fn] | ||
115 | |||
116 | for url in urls: | ||
117 | if url not in urldata: | ||
118 | urldata[url] = FetchData(url, d) | ||
119 | |||
120 | if setup: | ||
121 | for url in urldata: | ||
122 | if not urldata[url].setup: | ||
123 | urldata[url].setup_localpath(d) | ||
124 | |||
125 | urldata_cache[fn] = urldata | ||
126 | return urldata | ||
127 | |||
128 | def go(d): | ||
129 | """ | ||
130 | Fetch all urls | ||
131 | init must have previously been called | ||
132 | """ | ||
133 | urldata = init([], d, True) | ||
134 | |||
135 | for u in urldata: | ||
136 | ud = urldata[u] | ||
137 | m = ud.method | ||
138 | if ud.localfile: | ||
139 | if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): | ||
140 | # File already present along with md5 stamp file | ||
141 | # Touch md5 file to show activity | ||
142 | try: | ||
143 | os.utime(ud.md5, None) | ||
144 | except: | ||
145 | # Errors aren't fatal here | ||
146 | pass | ||
147 | continue | ||
148 | lf = bb.utils.lockfile(ud.lockfile) | ||
149 | if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5): | ||
150 | # If someone else fetched this before we got the lock, | ||
151 | # notice and don't try again | ||
152 | try: | ||
153 | os.utime(ud.md5, None) | ||
154 | except: | ||
155 | # Errors aren't fatal here | ||
156 | pass | ||
157 | bb.utils.unlockfile(lf) | ||
158 | continue | ||
159 | m.go(u, ud, d) | ||
160 | if ud.localfile: | ||
161 | if not m.forcefetch(u, ud, d): | ||
162 | Fetch.write_md5sum(u, ud, d) | ||
163 | bb.utils.unlockfile(lf) | ||
164 | |||
165 | |||
166 | def checkstatus(d): | ||
167 | """ | ||
168 | Check all urls exist upstream | ||
169 | init must have previously been called | ||
170 | """ | ||
171 | urldata = init([], d, True) | ||
172 | |||
173 | for u in urldata: | ||
174 | ud = urldata[u] | ||
175 | m = ud.method | ||
176 | bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u) | ||
177 | ret = m.checkstatus(u, ud, d) | ||
178 | if not ret: | ||
179 | bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % u) | ||
180 | |||
181 | def localpaths(d): | ||
182 | """ | ||
183 | Return a list of the local filenames, assuming successful fetch | ||
184 | """ | ||
185 | local = [] | ||
186 | urldata = init([], d, True) | ||
187 | |||
188 | for u in urldata: | ||
189 | ud = urldata[u] | ||
190 | local.append(ud.localpath) | ||
191 | |||
192 | return local | ||
193 | |||
194 | srcrev_internal_call = False | ||
195 | |||
196 | def get_srcrev(d): | ||
197 | """ | ||
198 | Return the version string for the current package | ||
199 | (usually to be used as PV) | ||
200 | Most packages usually only have one SCM so we just pass on the call. | ||
201 | In the multi SCM case, we build a value based on SRCREV_FORMAT which must | ||
202 | have been set. | ||
203 | """ | ||
204 | |||
205 | # | ||
206 | # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which | ||
207 | # could translate into a call to here. If it does, we need to catch this | ||
208 | # and provide some way so it knows get_srcrev is active instead of being | ||
209 | # some number etc. hence the srcrev_internal_call tracking and the magic | ||
210 | # "SRCREVINACTION" return value. | ||
211 | # | ||
212 | # Neater solutions welcome! | ||
213 | # | ||
214 | if bb.fetch.srcrev_internal_call: | ||
215 | return "SRCREVINACTION" | ||
216 | |||
217 | scms = [] | ||
218 | |||
219 | # Only call setup_localpath on URIs whose fetcher supports srcrev | ||
220 | urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False) | ||
221 | for u in urldata: | ||
222 | ud = urldata[u] | ||
223 | if ud.method.suppports_srcrev(): | ||
224 | if not ud.setup: | ||
225 | ud.setup_localpath(d) | ||
226 | scms.append(u) | ||
227 | |||
228 | if len(scms) == 0: | ||
229 | bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI") | ||
230 | raise ParameterError | ||
231 | |||
232 | if len(scms) == 1: | ||
233 | return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d) | ||
234 | |||
235 | # | ||
236 | # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT | ||
237 | # | ||
238 | format = bb.data.getVar('SRCREV_FORMAT', d, 1) | ||
239 | if not format: | ||
240 | bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.") | ||
241 | raise ParameterError | ||
242 | |||
243 | for scm in scms: | ||
244 | if 'name' in urldata[scm].parm: | ||
245 | name = urldata[scm].parm["name"] | ||
246 | rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d) | ||
247 | format = format.replace(name, rev) | ||
248 | |||
249 | return format | ||
250 | |||
251 | def localpath(url, d, cache = True): | ||
252 | """ | ||
253 | Called from the parser with cache=False since the cache isn't ready | ||
254 | at this point. Also called from classes in OE, e.g. patch.bbclass | ||
255 | """ | ||
256 | ud = init([url], d) | ||
257 | if ud[url].method: | ||
258 | return ud[url].localpath | ||
259 | return url | ||
260 | |||
261 | def runfetchcmd(cmd, d, quiet = False): | ||
262 | """ | ||
263 | Run cmd returning the command output | ||
264 | Raise an error if interrupted or cmd fails | ||
265 | Optionally echo command output to stdout | ||
266 | """ | ||
267 | |||
268 | # Need to export PATH as binary could be in metadata paths | ||
269 | # rather than host provided | ||
270 | # Also include some other variables. | ||
271 | # FIXME: Should really include all exported variables? | ||
272 | exportvars = ['PATH', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_PROXY_COMMAND'] | ||
273 | |||
274 | for var in exportvars: | ||
275 | val = data.getVar(var, d, True) | ||
276 | if val: | ||
277 | cmd = 'export ' + var + '=%s; %s' % (val, cmd) | ||
278 | |||
279 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd) | ||
280 | |||
281 | # redirect stderr to stdout | ||
282 | stdout_handle = os.popen(cmd + " 2>&1", "r") | ||
283 | output = "" | ||
284 | |||
285 | while 1: | ||
286 | line = stdout_handle.readline() | ||
287 | if not line: | ||
288 | break | ||
289 | if not quiet: | ||
290 | print line, | ||
291 | output += line | ||
292 | |||
293 | status = stdout_handle.close() or 0 | ||
294 | signal = status >> 8 | ||
295 | exitstatus = status & 0xff | ||
296 | |||
297 | if signal: | ||
298 | raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output)) | ||
299 | elif status != 0: | ||
300 | raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output)) | ||
301 | |||
302 | return output | ||
303 | |||
304 | class FetchData(object): | ||
305 | """ | ||
306 | A class which represents the fetcher state for a given URI. | ||
307 | """ | ||
308 | def __init__(self, url, d): | ||
309 | self.localfile = "" | ||
310 | (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d)) | ||
311 | self.date = Fetch.getSRCDate(self, d) | ||
312 | self.url = url | ||
313 | self.setup = False | ||
314 | for m in methods: | ||
315 | if m.supports(url, self, d): | ||
316 | self.method = m | ||
317 | return | ||
318 | raise NoMethodError("Missing implementation for url %s" % url) | ||
319 | |||
320 | def setup_localpath(self, d): | ||
321 | self.setup = True | ||
322 | if "localpath" in self.parm: | ||
323 | # if user sets localpath for file, use it instead. | ||
324 | self.localpath = self.parm["localpath"] | ||
325 | else: | ||
326 | bb.fetch.srcrev_internal_call = True | ||
327 | self.localpath = self.method.localpath(self.url, self, d) | ||
328 | bb.fetch.srcrev_internal_call = False | ||
329 | # We have to clear data's internal caches since the cached value of SRCREV is now wrong. | ||
330 | # Horrible... | ||
331 | bb.data.delVar("ISHOULDNEVEREXIST", d) | ||
332 | self.md5 = self.localpath + '.md5' | ||
333 | self.lockfile = self.localpath + '.lock' | ||
334 | |||
335 | |||
336 | class Fetch(object): | ||
337 | """Base class for 'fetch'ing data""" | ||
338 | |||
339 | def __init__(self, urls = []): | ||
340 | self.urls = [] | ||
341 | |||
342 | def supports(self, url, urldata, d): | ||
343 | """ | ||
344 | Check to see if this fetch class supports a given url. | ||
345 | """ | ||
346 | return 0 | ||
347 | |||
348 | def localpath(self, url, urldata, d): | ||
349 | """ | ||
350 | Return the local filename of a given url assuming a successful fetch. | ||
351 | Can also setup variables in urldata for use in go (saving code duplication | ||
352 | and duplicate code execution) | ||
353 | """ | ||
354 | return url | ||
355 | |||
356 | def setUrls(self, urls): | ||
357 | self.__urls = urls | ||
358 | |||
359 | def getUrls(self): | ||
360 | return self.__urls | ||
361 | |||
362 | urls = property(getUrls, setUrls, None, "Urls property") | ||
363 | |||
364 | def forcefetch(self, url, urldata, d): | ||
365 | """ | ||
366 | Force a fetch, even if localpath exists? | ||
367 | """ | ||
368 | return False | ||
369 | |||
370 | def suppports_srcrev(self): | ||
371 | """ | ||
372 | The fetcher supports auto source revisions (SRCREV) | ||
373 | """ | ||
374 | return False | ||
375 | |||
376 | def go(self, url, urldata, d): | ||
377 | """ | ||
378 | Fetch urls | ||
379 | Assumes localpath was called first | ||
380 | """ | ||
381 | raise NoMethodError("Missing implementation for url") | ||
382 | |||
383 | def checkstatus(self, url, urldata, d): | ||
384 | """ | ||
385 | Check the status of a URL | ||
386 | Assumes localpath was called first | ||
387 | """ | ||
388 | bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url) | ||
389 | return True | ||
390 | |||
391 | def getSRCDate(urldata, d): | ||
392 | """ | ||
393 | Return the SRC Date for the component | ||
394 | |||
395 | d the bb.data module | ||
396 | """ | ||
397 | if "srcdate" in urldata.parm: | ||
398 | return urldata.parm['srcdate'] | ||
399 | |||
400 | pn = data.getVar("PN", d, 1) | ||
401 | |||
402 | if pn: | ||
403 | return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
404 | |||
405 | return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
406 | getSRCDate = staticmethod(getSRCDate) | ||
407 | |||
408 | def srcrev_internal_helper(ud, d): | ||
409 | """ | ||
410 | Return: | ||
411 | a) a source revision if specified | ||
412 | b) True if auto srcrev is in action | ||
413 | c) False otherwise | ||
414 | """ | ||
415 | |||
416 | if 'rev' in ud.parm: | ||
417 | return ud.parm['rev'] | ||
418 | |||
419 | if 'tag' in ud.parm: | ||
420 | return ud.parm['tag'] | ||
421 | |||
422 | rev = None | ||
423 | if 'name' in ud.parm: | ||
424 | pn = data.getVar("PN", d, 1) | ||
425 | rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1) | ||
426 | if not rev: | ||
427 | rev = data.getVar("SRCREV", d, 1) | ||
428 | if not rev: | ||
429 | return False | ||
430 | if rev == "SRCREVINACTION": | ||
431 | return True | ||
432 | return rev | ||
433 | |||
434 | srcrev_internal_helper = staticmethod(srcrev_internal_helper) | ||
435 | |||
436 | def try_mirror(d, tarfn): | ||
437 | """ | ||
438 | Try to use a mirrored version of the sources. We do this | ||
439 | to avoid massive loads on foreign cvs and svn servers. | ||
440 | This method will be used by the different fetcher | ||
441 | implementations. | ||
442 | |||
443 | d Is a bb.data instance | ||
444 | tarfn is the name of the tarball | ||
445 | """ | ||
446 | tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn) | ||
447 | if os.access(tarpath, os.R_OK): | ||
448 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn) | ||
449 | return True | ||
450 | |||
451 | pn = data.getVar('PN', d, True) | ||
452 | src_tarball_stash = None | ||
453 | if pn: | ||
454 | src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split() | ||
455 | |||
456 | for stash in src_tarball_stash: | ||
457 | fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True) | ||
458 | uri = stash + tarfn | ||
459 | bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) | ||
460 | fetchcmd = fetchcmd.replace("${URI}", uri) | ||
461 | ret = os.system(fetchcmd) | ||
462 | if ret == 0: | ||
463 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetched %s from tarball stash, skipping checkout" % tarfn) | ||
464 | return True | ||
465 | return False | ||
466 | try_mirror = staticmethod(try_mirror) | ||
467 | |||
468 | def verify_md5sum(ud, got_sum): | ||
469 | """ | ||
470 | Verify the md5sum we wanted with the one we got | ||
471 | """ | ||
472 | wanted_sum = None | ||
473 | if 'md5sum' in ud.parm: | ||
474 | wanted_sum = ud.parm['md5sum'] | ||
475 | if not wanted_sum: | ||
476 | return True | ||
477 | |||
478 | return wanted_sum == got_sum | ||
479 | verify_md5sum = staticmethod(verify_md5sum) | ||
480 | |||
481 | def write_md5sum(url, ud, d): | ||
482 | md5data = bb.utils.md5_file(ud.localpath) | ||
483 | # verify the md5sum | ||
484 | if not Fetch.verify_md5sum(ud, md5data): | ||
485 | raise MD5SumError(url) | ||
486 | |||
487 | md5out = file(ud.md5, 'w') | ||
488 | md5out.write(md5data) | ||
489 | md5out.close() | ||
490 | write_md5sum = staticmethod(write_md5sum) | ||
491 | |||
492 | def latest_revision(self, url, ud, d): | ||
493 | """ | ||
494 | Look in the cache for the latest revision, if not present ask the SCM. | ||
495 | """ | ||
496 | if not hasattr(self, "_latest_revision"): | ||
497 | raise ParameterError | ||
498 | |||
499 | pd = persist_data.PersistData(d) | ||
500 | key = self._revision_key(url, ud, d) | ||
501 | rev = pd.getValue("BB_URI_HEADREVS", key) | ||
502 | if rev != None: | ||
503 | return str(rev) | ||
504 | |||
505 | rev = self._latest_revision(url, ud, d) | ||
506 | pd.setValue("BB_URI_HEADREVS", key, rev) | ||
507 | return rev | ||
508 | |||
509 | def sortable_revision(self, url, ud, d): | ||
510 | """ | ||
511 | Return a sortable revision string, built from a local incrementing count and the latest SCM revision. | ||
512 | """ | ||
513 | if hasattr(self, "_sortable_revision"): | ||
514 | return self._sortable_revision(url, ud, d) | ||
515 | |||
516 | pd = persist_data.PersistData(d) | ||
517 | key = self._revision_key(url, ud, d) | ||
518 | latest_rev = self._build_revision(url, ud, d) | ||
519 | last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev") | ||
520 | count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count") | ||
521 | |||
522 | if last_rev == latest_rev: | ||
523 | return str(count + "+" + latest_rev) | ||
524 | |||
525 | if count is None: | ||
526 | count = "0" | ||
527 | else: | ||
528 | count = str(int(count) + 1) | ||
529 | |||
530 | pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev) | ||
531 | pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count) | ||
532 | |||
533 | return str(count + "+" + latest_rev) | ||
534 | |||
535 | |||
536 | import cvs | ||
537 | import git | ||
538 | import local | ||
539 | import svn | ||
540 | import wget | ||
541 | import svk | ||
542 | import ssh | ||
543 | import perforce | ||
544 | import bzr | ||
545 | import hg | ||
546 | |||
547 | methods.append(local.Local()) | ||
548 | methods.append(wget.Wget()) | ||
549 | methods.append(svn.Svn()) | ||
550 | methods.append(git.Git()) | ||
551 | methods.append(cvs.Cvs()) | ||
552 | methods.append(svk.Svk()) | ||
553 | methods.append(ssh.SSH()) | ||
554 | methods.append(perforce.Perforce()) | ||
555 | methods.append(bzr.Bzr()) | ||
556 | methods.append(hg.Hg()) | ||
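The block above is the extension point the individual fetcher modules plug into: each one subclasses `Fetch`, implements at least `supports()`, `localpath()` and `go()`, and registers an instance in `methods`, which `FetchData.__init__` walks when choosing a method for a URL. Below is a minimal, purely illustrative sketch of that pattern; the `Example` class, the `example://` scheme and the file naming are invented for this note and are not part of the commit.

```python
# Illustrative sketch only -- not part of this commit. Assumes it runs in a
# BitBake environment where the bb.fetch module added above is importable.
import os
import bb
from bb import data
from bb.fetch import Fetch, FetchError

class Example(Fetch):
    def supports(self, url, ud, d):
        # bb.decodeurl() placed the URL scheme in ud.type
        return ud.type in ['example']

    def localpath(self, url, ud, d):
        # Name the download after host and path, mirroring the other fetchers
        ud.localfile = data.expand('example_%s%s.tar.gz'
                                   % (ud.host, ud.path.replace('/', '.')), d)
        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, url, ud, d):
        # A real fetcher would build a tool-specific command, run it via
        # runfetchcmd() and leave the result at ud.localpath
        raise FetchError("example fetcher is illustrative only")

# Registration, as done for the real fetchers at the end of __init__.py
bb.fetch.methods.append(Example())
```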
diff --git a/bitbake-dev/lib/bb/fetch/bzr.py b/bitbake-dev/lib/bb/fetch/bzr.py
new file mode 100644
index 0000000000..b23e9eef86
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/bzr.py
@@ -0,0 +1,154 @@
1 | """ | ||
2 | BitBake 'Fetch' implementation for bzr. | ||
3 | |||
4 | """ | ||
5 | |||
6 | # Copyright (C) 2007 Ross Burton | ||
7 | # Copyright (C) 2007 Richard Purdie | ||
8 | # | ||
9 | # Classes for obtaining upstream sources for the | ||
10 | # BitBake build tools. | ||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | |||
26 | import os | ||
27 | import sys | ||
28 | import bb | ||
29 | from bb import data | ||
30 | from bb.fetch import Fetch | ||
31 | from bb.fetch import FetchError | ||
32 | from bb.fetch import MissingParameterError | ||
33 | from bb.fetch import runfetchcmd | ||
34 | |||
35 | class Bzr(Fetch): | ||
36 | def supports(self, url, ud, d): | ||
37 | return ud.type in ['bzr'] | ||
38 | |||
39 | def localpath (self, url, ud, d): | ||
40 | |||
41 | # Create paths to bzr checkouts | ||
42 | relpath = ud.path | ||
43 | if relpath.startswith('/'): | ||
44 | # Remove leading slash as os.path.join can't cope | ||
45 | relpath = relpath[1:] | ||
46 | ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) | ||
47 | |||
48 | revision = Fetch.srcrev_internal_helper(ud, d) | ||
49 | if revision is True: | ||
50 | ud.revision = self.latest_revision(url, ud, d) | ||
51 | elif revision: | ||
52 | ud.revision = revision | ||
53 | |||
54 | if not ud.revision: | ||
55 | ud.revision = self.latest_revision(url, ud, d) | ||
56 | |||
57 | ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
58 | |||
59 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
60 | |||
61 | def _buildbzrcommand(self, ud, d, command): | ||
62 | """ | ||
63 | Build up a bzr command line based on ud | ||
64 | command is "fetch", "update", "revno" | ||
65 | """ | ||
66 | |||
67 | basecmd = data.expand('${FETCHCMD_bzr}', d) | ||
68 | |||
69 | proto = "http" | ||
70 | if "proto" in ud.parm: | ||
71 | proto = ud.parm["proto"] | ||
72 | |||
73 | bzrroot = ud.host + ud.path | ||
74 | |||
75 | options = [] | ||
76 | |||
77 | if command is "revno": | ||
78 | bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
79 | else: | ||
80 | if ud.revision: | ||
81 | options.append("-r %s" % ud.revision) | ||
82 | |||
83 | if command is "fetch": | ||
84 | bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) | ||
85 | elif command is "update": | ||
86 | bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) | ||
87 | else: | ||
88 | raise FetchError("Invalid bzr command %s" % command) | ||
89 | |||
90 | return bzrcmd | ||
91 | |||
92 | def go(self, loc, ud, d): | ||
93 | """Fetch url""" | ||
94 | |||
95 | # try to use the tarball stash | ||
96 | if Fetch.try_mirror(d, ud.localfile): | ||
97 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping bzr checkout." % ud.localpath) | ||
98 | return | ||
99 | |||
100 | if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): | ||
101 | bzrcmd = self._buildbzrcommand(ud, d, "update") | ||
102 | bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc) | ||
103 | os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) | ||
104 | runfetchcmd(bzrcmd, d) | ||
105 | else: | ||
106 | os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir))) | ||
107 | bzrcmd = self._buildbzrcommand(ud, d, "fetch") | ||
108 | bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc) | ||
109 | bb.mkdirhier(ud.pkgdir) | ||
110 | os.chdir(ud.pkgdir) | ||
111 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd) | ||
112 | runfetchcmd(bzrcmd, d) | ||
113 | |||
114 | os.chdir(ud.pkgdir) | ||
115 | # tar them up to a defined filename | ||
116 | try: | ||
117 | runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d) | ||
118 | except: | ||
119 | t, v, tb = sys.exc_info() | ||
120 | try: | ||
121 | os.unlink(ud.localpath) | ||
122 | except OSError: | ||
123 | pass | ||
124 | raise t, v, tb | ||
125 | |||
126 | def suppports_srcrev(self): | ||
127 | return True | ||
128 | |||
129 | def _revision_key(self, url, ud, d): | ||
130 | """ | ||
131 | Return a unique key for the url | ||
132 | """ | ||
133 | return "bzr:" + ud.pkgdir | ||
134 | |||
135 | def _latest_revision(self, url, ud, d): | ||
136 | """ | ||
137 | Return the latest upstream revision number | ||
138 | """ | ||
139 | bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url) | ||
140 | |||
141 | output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) | ||
142 | |||
143 | return output.strip() | ||
144 | |||
145 | def _sortable_revision(self, url, ud, d): | ||
146 | """ | ||
147 | Return a sortable revision number which in our case is the revision number | ||
148 | """ | ||
149 | |||
150 | return self._build_revision(url, ud, d) | ||
151 | |||
152 | def _build_revision(self, url, ud, d): | ||
153 | return ud.revision | ||
154 | |||
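Read together, `supports()`, `localpath()` and `_buildbzrcommand()` above define the URL shape this fetcher handles: a `bzr://` URI checked out under ${BZRDIR} and tarred into DL_DIR, with `proto` defaulting to http and the revision taken from a `rev=`/`tag=` parameter or from SRCREV. A purely illustrative example follows; the host, path and revision are invented.

```python
# Illustrative only: a URL of the shape the Bzr fetcher accepts.
# 'proto' defaults to "http"; the revision may instead come from SRCREV.
url = "bzr://bazaar.example.org/project/trunk;proto=http;rev=1234"
```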
diff --git a/bitbake-dev/lib/bb/fetch/cvs.py b/bitbake-dev/lib/bb/fetch/cvs.py
new file mode 100644
index 0000000000..c4ccf4303f
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/cvs.py
@@ -0,0 +1,178 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | # | ||
26 | #Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
27 | # | ||
28 | |||
29 | import os, re | ||
30 | import bb | ||
31 | from bb import data | ||
32 | from bb.fetch import Fetch | ||
33 | from bb.fetch import FetchError | ||
34 | from bb.fetch import MissingParameterError | ||
35 | |||
36 | class Cvs(Fetch): | ||
37 | """ | ||
38 | Class to fetch a module or modules from cvs repositories | ||
39 | """ | ||
40 | def supports(self, url, ud, d): | ||
41 | """ | ||
42 | Check to see if a given url can be fetched with cvs. | ||
43 | """ | ||
44 | return ud.type in ['cvs', 'pserver'] | ||
45 | |||
46 | def localpath(self, url, ud, d): | ||
47 | if not "module" in ud.parm: | ||
48 | raise MissingParameterError("cvs method needs a 'module' parameter") | ||
49 | ud.module = ud.parm["module"] | ||
50 | |||
51 | ud.tag = "" | ||
52 | if 'tag' in ud.parm: | ||
53 | ud.tag = ud.parm['tag'] | ||
54 | |||
55 | # Override the default date in certain cases | ||
56 | if 'date' in ud.parm: | ||
57 | ud.date = ud.parm['date'] | ||
58 | elif ud.tag: | ||
59 | ud.date = "" | ||
60 | |||
61 | norecurse = '' | ||
62 | if 'norecurse' in ud.parm: | ||
63 | norecurse = '_norecurse' | ||
64 | |||
65 | fullpath = '' | ||
66 | if 'fullpath' in ud.parm: | ||
67 | fullpath = '_fullpath' | ||
68 | |||
69 | ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) | ||
70 | |||
71 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
72 | |||
73 | def forcefetch(self, url, ud, d): | ||
74 | if (ud.date == "now"): | ||
75 | return True | ||
76 | return False | ||
77 | |||
78 | def go(self, loc, ud, d): | ||
79 | |||
80 | # try to use the tarball stash | ||
81 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): | ||
82 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath) | ||
83 | return | ||
84 | |||
85 | method = "pserver" | ||
86 | if "method" in ud.parm: | ||
87 | method = ud.parm["method"] | ||
88 | |||
89 | localdir = ud.module | ||
90 | if "localdir" in ud.parm: | ||
91 | localdir = ud.parm["localdir"] | ||
92 | |||
93 | cvs_port = "" | ||
94 | if "port" in ud.parm: | ||
95 | cvs_port = ud.parm["port"] | ||
96 | |||
97 | cvs_rsh = None | ||
98 | if method == "ext": | ||
99 | if "rsh" in ud.parm: | ||
100 | cvs_rsh = ud.parm["rsh"] | ||
101 | |||
102 | if method == "dir": | ||
103 | cvsroot = ud.path | ||
104 | else: | ||
105 | cvsroot = ":" + method | ||
106 | cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True) | ||
107 | if cvsproxyhost: | ||
108 | cvsroot += ";proxy=" + cvsproxyhost | ||
109 | cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True) | ||
110 | if cvsproxyport: | ||
111 | cvsroot += ";proxyport=" + cvsproxyport | ||
112 | cvsroot += ":" + ud.user | ||
113 | if ud.pswd: | ||
114 | cvsroot += ":" + ud.pswd | ||
115 | cvsroot += "@" + ud.host + ":" + cvs_port + ud.path | ||
116 | |||
117 | options = [] | ||
118 | if 'norecurse' in ud.parm: | ||
119 | options.append("-l") | ||
120 | if ud.date: | ||
121 | options.append("-D \"%s UTC\"" % ud.date) | ||
122 | if ud.tag: | ||
123 | options.append("-r %s" % ud.tag) | ||
124 | |||
125 | localdata = data.createCopy(d) | ||
126 | data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
127 | data.update_data(localdata) | ||
128 | |||
129 | data.setVar('CVSROOT', cvsroot, localdata) | ||
130 | data.setVar('CVSCOOPTS', " ".join(options), localdata) | ||
131 | data.setVar('CVSMODULE', ud.module, localdata) | ||
132 | cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
133 | cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) | ||
134 | |||
135 | if cvs_rsh: | ||
136 | cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) | ||
137 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) | ||
138 | |||
139 | # create module directory | ||
140 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory") | ||
141 | pkg = data.expand('${PN}', d) | ||
142 | pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg) | ||
143 | moddir = os.path.join(pkgdir,localdir) | ||
144 | if os.access(os.path.join(moddir,'CVS'), os.R_OK): | ||
145 | bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) | ||
146 | # update sources there | ||
147 | os.chdir(moddir) | ||
148 | myret = os.system(cvsupdatecmd) | ||
149 | else: | ||
150 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
151 | # check out sources there | ||
152 | bb.mkdirhier(pkgdir) | ||
153 | os.chdir(pkgdir) | ||
154 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd) | ||
155 | myret = os.system(cvscmd) | ||
156 | |||
157 | if myret != 0 or not os.access(moddir, os.R_OK): | ||
158 | try: | ||
159 | os.rmdir(moddir) | ||
160 | except OSError: | ||
161 | pass | ||
162 | raise FetchError(ud.module) | ||
163 | |||
164 | # tar them up to a defined filename | ||
165 | if 'fullpath' in ud.parm: | ||
166 | os.chdir(pkgdir) | ||
167 | myret = os.system("tar -czf %s %s" % (ud.localpath, localdir)) | ||
168 | else: | ||
169 | os.chdir(moddir) | ||
170 | os.chdir('..') | ||
171 | myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir))) | ||
172 | |||
173 | if myret != 0: | ||
174 | try: | ||
175 | os.unlink(ud.localpath) | ||
176 | except OSError: | ||
177 | pass | ||
178 | raise FetchError(ud.module) | ||
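The cvs fetcher's `localpath()` and `go()` read quite a few URL parameters: `module` is mandatory, while `tag`, `date`, `method` (pserver, ext or dir), `localdir`, `port`, `rsh`, `norecurse` and `fullpath` are optional. An illustrative example of the resulting URL shape; the server, root and module names are invented.

```python
# Illustrative only: a cvs URL carrying parameters go() understands.
# 'module' is required; a date of "now" makes forcefetch() re-fetch every time.
url = "cvs://anonymous@cvs.example.org/var/cvsroot;module=project;method=pserver;tag=RELEASE_1_0"
```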
diff --git a/bitbake-dev/lib/bb/fetch/git.py b/bitbake-dev/lib/bb/fetch/git.py
new file mode 100644
index 0000000000..f4ae724f87
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/git.py
@@ -0,0 +1,142 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' git implementation | ||
5 | |||
6 | """ | ||
7 | |||
8 | #Copyright (C) 2005 Richard Purdie | ||
9 | # | ||
10 | # This program is free software; you can redistribute it and/or modify | ||
11 | # it under the terms of the GNU General Public License version 2 as | ||
12 | # published by the Free Software Foundation. | ||
13 | # | ||
14 | # This program is distributed in the hope that it will be useful, | ||
15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
17 | # GNU General Public License for more details. | ||
18 | # | ||
19 | # You should have received a copy of the GNU General Public License along | ||
20 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
21 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
22 | |||
23 | import os, re | ||
24 | import bb | ||
25 | from bb import data | ||
26 | from bb.fetch import Fetch | ||
27 | from bb.fetch import FetchError | ||
28 | from bb.fetch import runfetchcmd | ||
29 | |||
30 | def prunedir(topdir): | ||
31 | # Delete everything reachable from the directory named in 'topdir'. | ||
32 | # CAUTION: This is dangerous! | ||
33 | for root, dirs, files in os.walk(topdir, topdown=False): | ||
34 | for name in files: | ||
35 | os.remove(os.path.join(root, name)) | ||
36 | for name in dirs: | ||
37 | os.rmdir(os.path.join(root, name)) | ||
38 | |||
39 | class Git(Fetch): | ||
40 | """Class to fetch a module or modules from git repositories""" | ||
41 | def supports(self, url, ud, d): | ||
42 | """ | ||
43 | Check to see if a given url can be fetched with git. | ||
44 | """ | ||
45 | return ud.type in ['git'] | ||
46 | |||
47 | def localpath(self, url, ud, d): | ||
48 | |||
49 | ud.proto = "rsync" | ||
50 | if 'protocol' in ud.parm: | ||
51 | ud.proto = ud.parm['protocol'] | ||
52 | |||
53 | ud.branch = ud.parm.get("branch", "master") | ||
54 | |||
55 | tag = Fetch.srcrev_internal_helper(ud, d) | ||
56 | if tag is True: | ||
57 | ud.tag = self.latest_revision(url, ud, d) | ||
58 | elif tag: | ||
59 | ud.tag = tag | ||
60 | |||
61 | if not ud.tag: | ||
62 | ud.tag = self.latest_revision(url, ud, d) | ||
63 | |||
64 | if ud.tag == "master": | ||
65 | ud.tag = self.latest_revision(url, ud, d) | ||
66 | |||
67 | ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d) | ||
68 | |||
69 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
70 | |||
71 | def go(self, loc, ud, d): | ||
72 | """Fetch url""" | ||
73 | |||
74 | if Fetch.try_mirror(d, ud.localfile): | ||
75 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath) | ||
76 | return | ||
77 | |||
78 | gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.')) | ||
79 | |||
80 | repofilename = 'git_%s.tar.gz' % (gitsrcname) | ||
81 | repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename) | ||
82 | repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname) | ||
83 | |||
84 | coname = '%s' % (ud.tag) | ||
85 | codir = os.path.join(repodir, coname) | ||
86 | |||
87 | if not os.path.exists(repodir): | ||
88 | if Fetch.try_mirror(d, repofilename): | ||
89 | bb.mkdirhier(repodir) | ||
90 | os.chdir(repodir) | ||
91 | runfetchcmd("tar -xzf %s" % (repofile), d) | ||
92 | else: | ||
93 | runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d) | ||
94 | |||
95 | os.chdir(repodir) | ||
96 | # Remove all but the .git directory | ||
97 | runfetchcmd("rm * -Rf", d) | ||
98 | runfetchcmd("git fetch %s://%s%s %s" % (ud.proto, ud.host, ud.path, ud.branch), d) | ||
99 | runfetchcmd("git fetch --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d) | ||
100 | runfetchcmd("git prune-packed", d) | ||
101 | runfetchcmd("git pack-redundant --all | xargs -r rm", d) | ||
102 | |||
103 | os.chdir(repodir) | ||
104 | mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) | ||
105 | if mirror_tarballs != "0": | ||
106 | bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository") | ||
107 | runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d) | ||
108 | |||
109 | if os.path.exists(codir): | ||
110 | prunedir(codir) | ||
111 | |||
112 | bb.mkdirhier(codir) | ||
113 | os.chdir(repodir) | ||
114 | runfetchcmd("git read-tree %s" % (ud.tag), d) | ||
115 | runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d) | ||
116 | |||
117 | os.chdir(codir) | ||
118 | bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") | ||
119 | runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d) | ||
120 | |||
121 | os.chdir(repodir) | ||
122 | prunedir(codir) | ||
123 | |||
124 | def suppports_srcrev(self): | ||
125 | return True | ||
126 | |||
127 | def _revision_key(self, url, ud, d): | ||
128 | """ | ||
129 | Return a unique key for the url | ||
130 | """ | ||
131 | return "git:" + ud.host + ud.path.replace('/', '.') | ||
132 | |||
133 | def _latest_revision(self, url, ud, d): | ||
134 | """ | ||
135 | Compute the HEAD revision for the url | ||
136 | """ | ||
137 | output = runfetchcmd("git ls-remote %s://%s%s %s" % (ud.proto, ud.host, ud.path, ud.branch), d, True) | ||
138 | return output.split()[0] | ||
139 | |||
140 | def _build_revision(self, url, ud, d): | ||
141 | return ud.tag | ||
142 | |||
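The git fetcher keeps a bare clone under ${GITDIR}, packs it into a mirror tarball when BB_GENERATE_MIRROR_TARBALLS allows, and tars the checkout of `ud.tag` into DL_DIR. Its `localpath()` shows the accepted parameters: `protocol` (defaulting to rsync in this version) and `branch` (defaulting to master), with the concrete revision normally supplied through SRCREV. An illustrative URL; the host and path are invented.

```python
# Illustrative only: a git URL as understood by localpath()/go() above.
# 'protocol' defaults to "rsync", 'branch' to "master"; the revision to pack
# is usually taken from SRCREV rather than a tag=/rev= parameter.
url = "git://git.example.org/project.git;protocol=git;branch=master"
```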
diff --git a/bitbake-dev/lib/bb/fetch/hg.py b/bitbake-dev/lib/bb/fetch/hg.py
new file mode 100644
index 0000000000..ee3bd2f7fe
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/hg.py
@@ -0,0 +1,141 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementation for mercurial DRCS (hg). | ||
5 | |||
6 | """ | ||
7 | |||
8 | # Copyright (C) 2003, 2004 Chris Larson | ||
9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
10 | # Copyright (C) 2007 Robert Schuster | ||
11 | # | ||
12 | # This program is free software; you can redistribute it and/or modify | ||
13 | # it under the terms of the GNU General Public License version 2 as | ||
14 | # published by the Free Software Foundation. | ||
15 | # | ||
16 | # This program is distributed in the hope that it will be useful, | ||
17 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
18 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
19 | # GNU General Public License for more details. | ||
20 | # | ||
21 | # You should have received a copy of the GNU General Public License along | ||
22 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
23 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
24 | # | ||
25 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
26 | |||
27 | import os, re | ||
28 | import sys | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | from bb.fetch import FetchError | ||
33 | from bb.fetch import MissingParameterError | ||
34 | from bb.fetch import runfetchcmd | ||
35 | |||
36 | class Hg(Fetch): | ||
37 | """Class to fetch from mercurial repositories""" | ||
38 | def supports(self, url, ud, d): | ||
39 | """ | ||
40 | Check to see if a given url can be fetched with mercurial. | ||
41 | """ | ||
42 | return ud.type in ['hg'] | ||
43 | |||
44 | def localpath(self, url, ud, d): | ||
45 | if not "module" in ud.parm: | ||
46 | raise MissingParameterError("hg method needs a 'module' parameter") | ||
47 | |||
48 | ud.module = ud.parm["module"] | ||
49 | |||
50 | # Create paths to mercurial checkouts | ||
51 | relpath = ud.path | ||
52 | if relpath.startswith('/'): | ||
53 | # Remove leading slash as os.path.join can't cope | ||
54 | relpath = relpath[1:] | ||
55 | ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) | ||
56 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | ||
57 | |||
58 | if 'rev' in ud.parm: | ||
59 | ud.revision = ud.parm['rev'] | ||
60 | |||
61 | ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) | ||
62 | |||
63 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
64 | |||
65 | def _buildhgcommand(self, ud, d, command): | ||
66 | """ | ||
67 | Build up an hg commandline based on ud | ||
68 | command is "fetch", "update", "pull", "info" | ||
69 | """ | ||
70 | |||
71 | basecmd = data.expand('${FETCHCMD_hg}', d) | ||
72 | |||
73 | proto = "http" | ||
74 | if "proto" in ud.parm: | ||
75 | proto = ud.parm["proto"] | ||
76 | |||
77 | host = ud.host | ||
78 | if proto == "file": | ||
79 | host = "/" | ||
80 | ud.host = "localhost" | ||
81 | |||
82 | hgroot = host + ud.path | ||
83 | |||
84 | if command is "info": | ||
85 | return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) | ||
86 | |||
87 | options = []; | ||
88 | if ud.revision: | ||
89 | options.append("-r %s" % ud.revision) | ||
90 | |||
91 | if command is "fetch": | ||
92 | cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module) | ||
93 | elif command is "pull": | ||
94 | cmd = "%s pull %s" % (basecmd, " ".join(options)) | ||
95 | elif command is "update": | ||
96 | cmd = "%s update -C %s" % (basecmd, " ".join(options)) | ||
97 | else: | ||
98 | raise FetchError("Invalid hg command %s" % command) | ||
99 | |||
100 | return cmd | ||
101 | |||
102 | def go(self, loc, ud, d): | ||
103 | """Fetch url""" | ||
104 | |||
105 | # try to use the tarball stash | ||
106 | if Fetch.try_mirror(d, ud.localfile): | ||
107 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath) | ||
108 | return | ||
109 | |||
110 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
111 | |||
112 | if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): | ||
113 | updatecmd = self._buildhgcommand(ud, d, "pull") | ||
114 | bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) | ||
115 | # update sources there | ||
116 | os.chdir(ud.moddir) | ||
117 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) | ||
118 | runfetchcmd(updatecmd, d) | ||
119 | |||
120 | updatecmd = self._buildhgcommand(ud, d, "update") | ||
121 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) | ||
122 | runfetchcmd(updatecmd, d) | ||
123 | else: | ||
124 | fetchcmd = self._buildhgcommand(ud, d, "fetch") | ||
125 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
126 | # check out sources there | ||
127 | bb.mkdirhier(ud.pkgdir) | ||
128 | os.chdir(ud.pkgdir) | ||
129 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) | ||
130 | runfetchcmd(fetchcmd, d) | ||
131 | |||
132 | os.chdir(ud.pkgdir) | ||
133 | try: | ||
134 | runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) | ||
135 | except: | ||
136 | t, v, tb = sys.exc_info() | ||
137 | try: | ||
138 | os.unlink(ud.localpath) | ||
139 | except OSError: | ||
140 | pass | ||
141 | raise t, v, tb | ||
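Like the cvs fetcher, the mercurial fetcher requires a `module` parameter; `rev` and `proto` are optional, the working copy lives under ${HGDIR}/host/path/module, and the result is tarred into DL_DIR. An illustrative URL; the host, path, module and revision are invented.

```python
# Illustrative only: an hg URL carrying the parameters read in localpath().
url = "hg://hg.example.org/repos;module=project;proto=http;rev=abc123def456"
```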
diff --git a/bitbake-dev/lib/bb/fetch/local.py b/bitbake-dev/lib/bb/fetch/local.py
new file mode 100644
index 0000000000..54d598ae89
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/local.py
@@ -0,0 +1,72 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | # | ||
26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
27 | |||
28 | import os, re | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | |||
33 | class Local(Fetch): | ||
34 | def supports(self, url, urldata, d): | ||
35 | """ | ||
36 | Check to see if a given url can be fetched with the local file method. | ||
37 | """ | ||
38 | return urldata.type in ['file','patch'] | ||
39 | |||
40 | def localpath(self, url, urldata, d): | ||
41 | """ | ||
42 | Return the local filename of a given url assuming a successful fetch. | ||
43 | """ | ||
44 | path = url.split("://")[1] | ||
45 | path = path.split(";")[0] | ||
46 | newpath = path | ||
47 | if path[0] != "/": | ||
48 | filespath = data.getVar('FILESPATH', d, 1) | ||
49 | if filespath: | ||
50 | newpath = bb.which(filespath, path) | ||
51 | if not newpath: | ||
52 | filesdir = data.getVar('FILESDIR', d, 1) | ||
53 | if filesdir: | ||
54 | newpath = os.path.join(filesdir, path) | ||
55 | # We don't set localfile as for this fetcher the file is already local! | ||
56 | return newpath | ||
57 | |||
58 | def go(self, url, urldata, d): | ||
59 | """Fetch urls (no-op for Local method)""" | ||
60 | # no need to fetch local files, we'll deal with them in place. | ||
61 | return 1 | ||
62 | |||
63 | def checkstatus(self, url, urldata, d): | ||
64 | """ | ||
65 | Check the status of the url | ||
66 | """ | ||
67 | if urldata.localpath.find("*") != -1: | ||
68 | bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url) | ||
69 | return True | ||
70 | if os.path.exists(urldata.localpath): | ||
71 | return True | ||
72 | return False | ||
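The local fetcher claims the `file` and `patch` URL types, resolves relative paths against FILESPATH (falling back to FILESDIR) via bb.which(), and deliberately leaves `localfile` unset because nothing needs downloading. Illustrative examples of URLs it would claim; the paths are invented.

```python
# Illustrative only: URLs handled by the Local fetcher. A relative path is
# searched for in FILESPATH/FILESDIR; an absolute path is used as-is.
urls = ["file://defconfig", "file:///srv/mirror/project-1.0.tar.bz2"]
```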
diff --git a/bitbake-dev/lib/bb/fetch/perforce.py b/bitbake-dev/lib/bb/fetch/perforce.py
new file mode 100644
index 0000000000..b594d2bde2
--- /dev/null
+++ b/bitbake-dev/lib/bb/fetch/perforce.py
@@ -0,0 +1,213 @@
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | # | ||
26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
27 | |||
28 | import os, re | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | from bb.fetch import FetchError | ||
33 | from bb.fetch import MissingParameterError | ||
34 | |||
35 | class Perforce(Fetch): | ||
36 | def supports(self, url, ud, d): | ||
37 | return ud.type in ['p4'] | ||
38 | |||
39 | def doparse(url,d): | ||
40 | parm = {} | ||
41 | path = url.split("://")[1] | ||
42 | delim = path.find("@"); | ||
43 | if delim != -1: | ||
44 | (user,pswd,host,port) = path.split('@')[0].split(":") | ||
45 | path = path.split('@')[1] | ||
46 | else: | ||
47 | (host,port) = data.getVar('P4PORT', d).split(':') | ||
48 | user = "" | ||
49 | pswd = "" | ||
50 | |||
51 | if path.find(";") != -1: | ||
52 | keys=[] | ||
53 | values=[] | ||
54 | plist = path.split(';') | ||
55 | for item in plist: | ||
56 | if item.count('='): | ||
57 | (key,value) = item.split('=') | ||
58 | keys.append(key) | ||
59 | values.append(value) | ||
60 | |||
61 | parm = dict(zip(keys,values)) | ||
62 | path = "//" + path.split(';')[0] | ||
63 | host += ":%s" % (port) | ||
64 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) | ||
65 | |||
66 | return host,path,user,pswd,parm | ||
67 | doparse = staticmethod(doparse) | ||
68 | |||
69 | def getcset(d, depot,host,user,pswd,parm): | ||
70 | if "cset" in parm: | ||
71 | return parm["cset"]; | ||
72 | if user: | ||
73 | data.setVar('P4USER', user, d) | ||
74 | if pswd: | ||
75 | data.setVar('P4PASSWD', pswd, d) | ||
76 | if host: | ||
77 | data.setVar('P4PORT', host, d) | ||
78 | |||
79 | p4date = data.getVar("P4DATE", d, 1) | ||
80 | if "revision" in parm: | ||
81 | depot += "#%s" % (parm["revision"]) | ||
82 | elif "label" in parm: | ||
83 | depot += "@%s" % (parm["label"]) | ||
84 | elif p4date: | ||
85 | depot += "@%s" % (p4date) | ||
86 | |||
87 | p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1) | ||
88 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s changes -m 1 %s" % (p4cmd, depot)) | ||
89 | p4file = os.popen("%s changes -m 1 %s" % (p4cmd,depot)) | ||
90 | cset = p4file.readline().strip() | ||
91 | bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset)) | ||
92 | if not cset: | ||
93 | return -1 | ||
94 | |||
95 | return cset.split(' ')[1] | ||
96 | getcset = staticmethod(getcset) | ||
97 | |||
98 | def localpath(self, url, ud, d): | ||
99 | |||
100 | (host,path,user,pswd,parm) = Perforce.doparse(url,d) | ||
101 | |||
102 | # If a label is specified, we use that as our filename | ||
103 | |||
104 | if "label" in parm: | ||
105 | ud.localfile = "%s.tar.gz" % (parm["label"]) | ||
106 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
107 | |||
108 | base = path | ||
109 | which = path.find('/...') | ||
110 | if which != -1: | ||
111 | base = path[:which] | ||
112 | |||
113 | if base[0] == "/": | ||
114 | base = base[1:] | ||
115 | |||
116 | cset = Perforce.getcset(d, path, host, user, pswd, parm) | ||
117 | |||
118 | ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d) | ||
119 | |||
120 | return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile) | ||
121 | |||
122 | def go(self, loc, ud, d): | ||
123 | """ | ||
124 | Fetch urls | ||
125 | """ | ||
126 | |||
127 | # try to use the tarball stash | ||
128 | if Fetch.try_mirror(d, ud.localfile): | ||
129 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath) | ||
130 | return | ||
131 | |||
132 | (host,depot,user,pswd,parm) = Perforce.doparse(loc, d) | ||
133 | |||
134 | if depot.find('/...') != -1: | ||
135 | path = depot[:depot.find('/...')] | ||
136 | else: | ||
137 | path = depot | ||
138 | |||
139 | if "module" in parm: | ||
140 | module = parm["module"] | ||
141 | else: | ||
142 | module = os.path.basename(path) | ||
143 | |||
144 | localdata = data.createCopy(d) | ||
145 | data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
146 | data.update_data(localdata) | ||
147 | |||
148 | # Get the p4 command | ||
149 | if user: | ||
150 | data.setVar('P4USER', user, localdata) | ||
151 | |||
152 | if pswd: | ||
153 | data.setVar('P4PASSWD', pswd, localdata) | ||
154 | |||
155 | if host: | ||
156 | data.setVar('P4PORT', host, localdata) | ||
157 | |||
158 | p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
159 | |||
160 | # create temp directory | ||
161 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") | ||
162 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
163 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) | ||
164 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
165 | tmpfile = tmppipe.readline().strip() | ||
166 | if not tmpfile: | ||
167 | bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
168 | raise FetchError(module) | ||
169 | |||
170 | if "label" in parm: | ||
171 | depot = "%s@%s" % (depot,parm["label"]) | ||
172 | else: | ||
173 | cset = Perforce.getcset(d, depot, host, user, pswd, parm) | ||
174 | depot = "%s@%s" % (depot,cset) | ||
175 | |||
176 | os.chdir(tmpfile) | ||
177 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
178 | bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot)) | ||
179 | p4file = os.popen("%s files %s" % (p4cmd, depot)) | ||
180 | |||
181 | if not p4file: | ||
182 | bb.error("Fetch: unable to get the P4 files from %s" % (depot)) | ||
183 | raise FetchError(module) | ||
184 | |||
185 | count = 0 | ||
186 | |||
187 | for file in p4file: | ||
188 | list = file.split() | ||
189 | |||
190 | if list[2] == "delete": | ||
191 | continue | ||
192 | |||
193 | dest = list[0][len(path)+1:] | ||
194 | where = dest.find("#") | ||
195 | |||
196 | os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0])) | ||
197 | count = count + 1 | ||
198 | |||
199 | if count == 0: | ||
200 | bb.error("Fetch: No files gathered from the P4 fetch") | ||
201 | raise FetchError(module) | ||
202 | |||
203 | myret = os.system("tar -czf %s %s" % (ud.localpath, module)) | ||
204 | if myret != 0: | ||
205 | try: | ||
206 | os.unlink(ud.localpath) | ||
207 | except OSError: | ||
208 | pass | ||
209 | raise FetchError(module) | ||
210 | # cleanup | ||
211 | os.system('rm -rf %s' % tmpfile) | ||
212 | |||
213 | |||
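For reference, the loop in go() above parses each line of "p4 files" output and issues one "p4 print -o" per file. A minimal standalone sketch of that parsing step, with the depot base and the sample output line invented for illustration (the real code reads them from the p4 command):

    # Sketch of the per-file parsing done in Perforce.go(); values are illustrative only.
    path = "//depot/project"
    sample = "//depot/project/src/main.c#4 - edit change 1234 (text)"

    fields = sample.split()
    if fields[2] != "delete":               # deleted revisions are skipped
        dest = fields[0][len(path) + 1:]    # file path relative to the depot base
        where = dest.find("#")              # strip the trailing '#<rev>'
        print(dest[:where])                 # -> src/main.c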
diff --git a/bitbake-dev/lib/bb/fetch/ssh.py b/bitbake-dev/lib/bb/fetch/ssh.py new file mode 100644 index 0000000000..81a9892dcc --- /dev/null +++ b/bitbake-dev/lib/bb/fetch/ssh.py | |||
@@ -0,0 +1,120 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | ''' | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | This implementation is for Secure Shell (SSH), and attempts to comply with the | ||
7 | IETF secsh internet draft: | ||
8 | http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/ | ||
9 | |||
10 | Currently does not support the sftp parameters, as this uses scp. | ||
11 | It also does not support the 'fingerprint' connection parameter. | ||
12 | |||
13 | ''' | ||
14 | |||
15 | # Copyright (C) 2006 OpenedHand Ltd. | ||
16 | # | ||
17 | # | ||
18 | # Based in part on svk.py: | ||
19 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
20 | # Based on svn.py: | ||
21 | # Copyright (C) 2003, 2004 Chris Larson | ||
22 | # Based on functions from the base bb module: | ||
23 | # Copyright 2003 Holger Schurig | ||
24 | # | ||
25 | # | ||
26 | # This program is free software; you can redistribute it and/or modify | ||
27 | # it under the terms of the GNU General Public License version 2 as | ||
28 | # published by the Free Software Foundation. | ||
29 | # | ||
30 | # This program is distributed in the hope that it will be useful, | ||
31 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
32 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
33 | # GNU General Public License for more details. | ||
34 | # | ||
35 | # You should have received a copy of the GNU General Public License along | ||
36 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
37 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
38 | |||
39 | import re, os | ||
40 | import bb | ||
41 | from bb import data | ||
42 | from bb.fetch import Fetch | ||
43 | from bb.fetch import FetchError | ||
44 | from bb.fetch import MissingParameterError | ||
45 | |||
46 | |||
47 | __pattern__ = re.compile(r''' | ||
48 | \s* # Skip leading whitespace | ||
49 | ssh:// # scheme | ||
50 | ( # Optional username/password block | ||
51 | (?P<user>\S+) # username | ||
52 | (:(?P<pass>\S+))? # colon followed by the password (optional) | ||
53 | )? | ||
54 | (?P<cparam>(;[^;]+)*)? # connection parameters block (optional) | ||
55 | @ | ||
56 | (?P<host>\S+?) # non-greedy match of the host | ||
57 | (:(?P<port>[0-9]+))? # colon followed by the port (optional) | ||
58 | / | ||
59 | (?P<path>[^;]+) # path on the remote system, may be absolute or relative, | ||
60 | # and may include the use of '~' to reference the remote home | ||
61 | # directory | ||
62 | (?P<sparam>(;[^;]+)*)? # parameters block (optional) | ||
63 | $ | ||
64 | ''', re.VERBOSE) | ||
65 | |||
66 | class SSH(Fetch): | ||
67 | '''Class to fetch a module or modules via Secure Shell''' | ||
68 | |||
69 | def supports(self, url, urldata, d): | ||
70 | return __pattern__.match(url) is not None | ||
71 | |||
72 | def localpath(self, url, urldata, d): | ||
73 | m = __pattern__.match(url) | ||
74 | path = m.group('path') | ||
75 | host = m.group('host') | ||
76 | lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path)) | ||
77 | return lpath | ||
78 | |||
79 | def go(self, url, urldata, d): | ||
80 | dldir = data.getVar('DL_DIR', d, 1) | ||
81 | |||
82 | m = __pattern__.match(url) | ||
83 | path = m.group('path') | ||
84 | host = m.group('host') | ||
85 | port = m.group('port') | ||
86 | user = m.group('user') | ||
87 | password = m.group('pass') | ||
88 | |||
89 | ldir = os.path.join(dldir, host) | ||
90 | lpath = os.path.join(ldir, os.path.basename(path)) | ||
91 | |||
92 | if not os.path.exists(ldir): | ||
93 | os.makedirs(ldir) | ||
94 | |||
95 | if port: | ||
96 | port = '-P %s' % port | ||
97 | else: | ||
98 | port = '' | ||
99 | |||
100 | if user: | ||
101 | fr = user | ||
102 | if password: | ||
103 | fr += ':%s' % password | ||
104 | fr += '@%s' % host | ||
105 | else: | ||
106 | fr = host | ||
107 | fr += ':%s' % path | ||
108 | |||
109 | |||
110 | import commands | ||
111 | cmd = 'scp -B -r %s %s %s/' % ( | ||
112 | port, | ||
113 | commands.mkarg(fr), | ||
114 | commands.mkarg(ldir) | ||
115 | ) | ||
116 | |||
117 | (exitstatus, output) = commands.getstatusoutput(cmd) | ||
118 | if exitstatus != 0: | ||
119 | print output | ||
120 | raise FetchError('Unable to fetch %s' % url) | ||
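To make the verbose pattern above easier to follow, here is a self-contained check of how a typical URL decomposes; the expression is repeated verbatim, and the username, host, port and path are invented for the example:

    import re

    # Same expression as __pattern__ above, reproduced so this snippet stands alone.
    pattern = re.compile(r'''
        \s*
        ssh://
        (
            (?P<user>\S+)
            (:(?P<pass>\S+))?
        )?
        (?P<cparam>(;[^;]+)*)?
        @
        (?P<host>\S+?)
        (:(?P<port>[0-9]+))?
        /
        (?P<path>[^;]+)
        (?P<sparam>(;[^;]+)*)?
        $
    ''', re.VERBOSE)

    # Hypothetical URL; any user/host/path would do.
    m = pattern.match("ssh://joe@build.example.com:2222/srv/downloads/app.tar.gz")
    print(m.group('user'), m.group('host'), m.group('port'), m.group('path'))
    # -> joe build.example.com 2222 srv/downloads/app.tar.gz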
diff --git a/bitbake-dev/lib/bb/fetch/svk.py b/bitbake-dev/lib/bb/fetch/svk.py new file mode 100644 index 0000000000..d863ccb6e0 --- /dev/null +++ b/bitbake-dev/lib/bb/fetch/svk.py | |||
@@ -0,0 +1,109 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | This implementation is for svk. It is based on the svn implementation. | ||
7 | |||
8 | """ | ||
9 | |||
10 | # Copyright (C) 2006 Holger Hans Peter Freyther | ||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | # | ||
26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
27 | |||
28 | import os, re | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | from bb.fetch import FetchError | ||
33 | from bb.fetch import MissingParameterError | ||
34 | |||
35 | class Svk(Fetch): | ||
36 | """Class to fetch a module or modules from svk repositories""" | ||
37 | def supports(self, url, ud, d): | ||
38 | """ | ||
39 | Check to see if a given url can be fetched with svk. | ||
40 | """ | ||
41 | return ud.type in ['svk'] | ||
42 | |||
43 | def localpath(self, url, ud, d): | ||
44 | if not "module" in ud.parm: | ||
45 | raise MissingParameterError("svk method needs a 'module' parameter") | ||
46 | else: | ||
47 | ud.module = ud.parm["module"] | ||
48 | |||
49 | ud.revision = "" | ||
50 | if 'rev' in ud.parm: | ||
51 | ud.revision = ud.parm['rev'] | ||
52 | |||
53 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | ||
54 | |||
55 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
56 | |||
57 | def forcefetch(self, url, ud, d): | ||
58 | if (ud.date == "now"): | ||
59 | return True | ||
60 | return False | ||
61 | |||
62 | def go(self, loc, ud, d): | ||
63 | """Fetch urls""" | ||
64 | |||
65 | if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile): | ||
66 | return | ||
67 | |||
68 | svkroot = ud.host + ud.path | ||
69 | |||
70 | svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) | ||
71 | |||
72 | if ud.revision: | ||
73 | svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) | ||
74 | |||
75 | # create temp directory | ||
76 | localdata = data.createCopy(d) | ||
77 | data.update_data(localdata) | ||
78 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") | ||
79 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
80 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) | ||
81 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
82 | tmpfile = tmppipe.readline().strip() | ||
83 | if not tmpfile: | ||
84 | bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
85 | raise FetchError(ud.module) | ||
86 | |||
87 | # check out sources there | ||
88 | os.chdir(tmpfile) | ||
89 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
90 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) | ||
91 | myret = os.system(svkcmd) | ||
92 | if myret != 0: | ||
93 | try: | ||
94 | os.rmdir(tmpfile) | ||
95 | except OSError: | ||
96 | pass | ||
97 | raise FetchError(ud.module) | ||
98 | |||
99 | os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) | ||
100 | # tar them up to a defined filename | ||
101 | myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) | ||
102 | if myret != 0: | ||
103 | try: | ||
104 | os.unlink(ud.localpath) | ||
105 | except OSError: | ||
106 | pass | ||
107 | raise FetchError(ud.module) | ||
108 | # cleanup | ||
109 | os.system('rm -rf %s' % tmpfile) | ||
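The command construction in go() above reduces to a simple template keyed on whether a revision or a date is requested. A short sketch with invented values, since the real ones come from the parsed URL data:

    # Sketch of the checkout command built in Svk.go(); values are illustrative only.
    svkroot  = "svk.example.org/repos/proj"
    module   = "trunk/widget"
    revision = "1234"
    date     = "now"

    if revision:
        svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)
    else:
        svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
    print(svkcmd)   # -> svk co -r 1234 svk.example.org/repos/proj/trunk/widget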
diff --git a/bitbake-dev/lib/bb/fetch/svn.py b/bitbake-dev/lib/bb/fetch/svn.py new file mode 100644 index 0000000000..5e5b31b3ad --- /dev/null +++ b/bitbake-dev/lib/bb/fetch/svn.py | |||
@@ -0,0 +1,204 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementation for svn. | ||
5 | |||
6 | """ | ||
7 | |||
8 | # Copyright (C) 2003, 2004 Chris Larson | ||
9 | # Copyright (C) 2004 Marcin Juszkiewicz | ||
10 | # | ||
11 | # This program is free software; you can redistribute it and/or modify | ||
12 | # it under the terms of the GNU General Public License version 2 as | ||
13 | # published by the Free Software Foundation. | ||
14 | # | ||
15 | # This program is distributed in the hope that it will be useful, | ||
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
18 | # GNU General Public License for more details. | ||
19 | # | ||
20 | # You should have received a copy of the GNU General Public License along | ||
21 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
22 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
23 | # | ||
24 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
25 | |||
26 | import os, re | ||
27 | import sys | ||
28 | import bb | ||
29 | from bb import data | ||
30 | from bb.fetch import Fetch | ||
31 | from bb.fetch import FetchError | ||
32 | from bb.fetch import MissingParameterError | ||
33 | from bb.fetch import runfetchcmd | ||
34 | |||
35 | class Svn(Fetch): | ||
36 | """Class to fetch a module or modules from svn repositories""" | ||
37 | def supports(self, url, ud, d): | ||
38 | """ | ||
39 | Check to see if a given url can be fetched with svn. | ||
40 | """ | ||
41 | return ud.type in ['svn'] | ||
42 | |||
43 | def localpath(self, url, ud, d): | ||
44 | if not "module" in ud.parm: | ||
45 | raise MissingParameterError("svn method needs a 'module' parameter") | ||
46 | |||
47 | ud.module = ud.parm["module"] | ||
48 | |||
49 | # Create paths to svn checkouts | ||
50 | relpath = ud.path | ||
51 | if relpath.startswith('/'): | ||
52 | # Remove leading slash as os.path.join can't cope | ||
53 | relpath = relpath[1:] | ||
54 | ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) | ||
55 | ud.moddir = os.path.join(ud.pkgdir, ud.module) | ||
56 | |||
57 | if 'rev' in ud.parm: | ||
58 | ud.date = "" | ||
59 | ud.revision = ud.parm['rev'] | ||
60 | elif 'date' in ud.parm: | ||
61 | ud.date = ud.parm['date'] | ||
62 | ud.revision = "" | ||
63 | else: | ||
64 | # | ||
65 | # ***Nasty hack*** | ||
66 | # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE) | ||
67 | # Should warn people to switch to SRCREV here | ||
68 | # | ||
69 | pv = data.getVar("PV", d, 0) | ||
70 | if "DATE" in pv: | ||
71 | ud.revision = "" | ||
72 | else: | ||
73 | rev = Fetch.srcrev_internal_helper(ud, d) | ||
74 | if rev is True: | ||
75 | ud.revision = self.latest_revision(url, ud, d) | ||
76 | ud.date = "" | ||
77 | elif rev: | ||
78 | ud.revision = rev | ||
79 | ud.date = "" | ||
80 | else: | ||
81 | ud.revision = "" | ||
82 | |||
83 | ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) | ||
84 | |||
85 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
86 | |||
87 | def _buildsvncommand(self, ud, d, command): | ||
88 | """ | ||
89 | Build up an svn commandline based on ud | ||
90 | command is "fetch", "update", "info" | ||
91 | """ | ||
92 | |||
93 | basecmd = data.expand('${FETCHCMD_svn}', d) | ||
94 | |||
95 | proto = "svn" | ||
96 | if "proto" in ud.parm: | ||
97 | proto = ud.parm["proto"] | ||
98 | |||
99 | svn_rsh = None | ||
100 | if proto == "svn+ssh" and "rsh" in ud.parm: | ||
101 | svn_rsh = ud.parm["rsh"] | ||
102 | |||
103 | svnroot = ud.host + ud.path | ||
104 | |||
105 | # use either the revision or SRCDATE (in braces) | ||
106 | options = [] | ||
107 | |||
108 | if ud.user: | ||
109 | options.append("--username %s" % ud.user) | ||
110 | |||
111 | if ud.pswd: | ||
112 | options.append("--password %s" % ud.pswd) | ||
113 | |||
114 | if command == "info": | ||
115 | svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module) | ||
116 | else: | ||
117 | if ud.revision: | ||
118 | options.append("-r %s" % ud.revision) | ||
119 | elif ud.date: | ||
120 | options.append("-r {%s}" % ud.date) | ||
121 | |||
122 | if command == "fetch": | ||
123 | svncmd = "%s co %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, ud.module) | ||
124 | elif command == "update": | ||
125 | svncmd = "%s update %s" % (basecmd, " ".join(options)) | ||
126 | else: | ||
127 | raise FetchError("Invalid svn command %s" % command) | ||
128 | |||
129 | if svn_rsh: | ||
130 | svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) | ||
131 | |||
132 | return svncmd | ||
133 | |||
134 | def go(self, loc, ud, d): | ||
135 | """Fetch url""" | ||
136 | |||
137 | # try to use the tarball stash | ||
138 | if Fetch.try_mirror(d, ud.localfile): | ||
139 | bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath) | ||
140 | return | ||
141 | |||
142 | bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") | ||
143 | |||
144 | if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): | ||
145 | svnupdatecmd = self._buildsvncommand(ud, d, "update") | ||
146 | bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) | ||
147 | # update sources there | ||
148 | os.chdir(ud.moddir) | ||
149 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd) | ||
150 | runfetchcmd(svnupdatecmd, d) | ||
151 | else: | ||
152 | svnfetchcmd = self._buildsvncommand(ud, d, "fetch") | ||
153 | bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) | ||
154 | # check out sources there | ||
155 | bb.mkdirhier(ud.pkgdir) | ||
156 | os.chdir(ud.pkgdir) | ||
157 | bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd) | ||
158 | runfetchcmd(svnfetchcmd, d) | ||
159 | |||
160 | os.chdir(ud.pkgdir) | ||
161 | # tar them up to a defined filename | ||
162 | try: | ||
163 | runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) | ||
164 | except: | ||
165 | t, v, tb = sys.exc_info() | ||
166 | try: | ||
167 | os.unlink(ud.localpath) | ||
168 | except OSError: | ||
169 | pass | ||
170 | raise t, v, tb | ||
171 | |||
172 | def suppports_srcrev(self): | ||
173 | return True | ||
174 | |||
175 | def _revision_key(self, url, ud, d): | ||
176 | """ | ||
177 | Return a unique key for the url | ||
178 | """ | ||
179 | return "svn:" + ud.moddir | ||
180 | |||
181 | def _latest_revision(self, url, ud, d): | ||
182 | """ | ||
183 | Return the latest upstream revision number | ||
184 | """ | ||
185 | bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url) | ||
186 | |||
187 | output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True) | ||
188 | |||
189 | revision = None | ||
190 | for line in output.splitlines(): | ||
191 | if "Last Changed Rev" in line: | ||
192 | revision = line.split(":")[1].strip() | ||
193 | |||
194 | return revision | ||
195 | |||
196 | def _sortable_revision(self, url, ud, d): | ||
197 | """ | ||
198 | Return a sortable revision number which in our case is the revision number | ||
199 | """ | ||
200 | |||
201 | return self._build_revision(url, ud, d) | ||
202 | |||
203 | def _build_revision(self, url, ud, d): | ||
204 | return ud.revision | ||
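_latest_revision() above scrapes the "Last Changed Rev" line out of "svn info" output. A standalone sketch of that parsing, with the command output replaced by a canned example string (the real code obtains it via runfetchcmd):

    # Sketch of the 'svn info' parsing in Svn._latest_revision(); the output text is canned.
    output = """Path: trunk
    URL: svn://svn.example.org/repos/proj/trunk
    Revision: 4242
    Last Changed Author: someone
    Last Changed Rev: 4240
    Last Changed Date: 2008-01-15 10:00:00 +0000
    """

    revision = None
    for line in output.splitlines():
        if "Last Changed Rev" in line:
            revision = line.split(":")[1].strip()

    print(revision)   # -> 4240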
diff --git a/bitbake-dev/lib/bb/fetch/wget.py b/bitbake-dev/lib/bb/fetch/wget.py new file mode 100644 index 0000000000..739d5a1bc6 --- /dev/null +++ b/bitbake-dev/lib/bb/fetch/wget.py | |||
@@ -0,0 +1,105 @@ | |||
1 | # ex:ts=4:sw=4:sts=4:et | ||
2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
3 | """ | ||
4 | BitBake 'Fetch' implementations | ||
5 | |||
6 | Classes for obtaining upstream sources for the | ||
7 | BitBake build tools. | ||
8 | |||
9 | """ | ||
10 | |||
11 | # Copyright (C) 2003, 2004 Chris Larson | ||
12 | # | ||
13 | # This program is free software; you can redistribute it and/or modify | ||
14 | # it under the terms of the GNU General Public License version 2 as | ||
15 | # published by the Free Software Foundation. | ||
16 | # | ||
17 | # This program is distributed in the hope that it will be useful, | ||
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
20 | # GNU General Public License for more details. | ||
21 | # | ||
22 | # You should have received a copy of the GNU General Public License along | ||
23 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
24 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
25 | # | ||
26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
27 | |||
28 | import os, re | ||
29 | import bb | ||
30 | from bb import data | ||
31 | from bb.fetch import Fetch | ||
32 | from bb.fetch import FetchError | ||
33 | from bb.fetch import uri_replace | ||
34 | |||
35 | class Wget(Fetch): | ||
36 | """Class to fetch urls via 'wget'""" | ||
37 | def supports(self, url, ud, d): | ||
38 | """ | ||
39 | Check to see if a given url can be fetched with wget. | ||
40 | """ | ||
41 | return ud.type in ['http','https','ftp'] | ||
42 | |||
43 | def localpath(self, url, ud, d): | ||
44 | |||
45 | url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}]) | ||
46 | ud.basename = os.path.basename(ud.path) | ||
47 | ud.localfile = data.expand(os.path.basename(url), d) | ||
48 | |||
49 | return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) | ||
50 | |||
51 | def go(self, uri, ud, d, checkonly = False): | ||
52 | """Fetch urls""" | ||
53 | |||
54 | def fetch_uri(uri, ud, d): | ||
55 | if checkonly: | ||
56 | fetchcmd = data.getVar("CHECKCOMMAND", d, 1) | ||
57 | elif os.path.exists(ud.localpath): | ||
58 | # file exists, but we didn't complete it; try again | ||
59 | fetchcmd = data.getVar("RESUMECOMMAND", d, 1) | ||
60 | else: | ||
61 | fetchcmd = data.getVar("FETCHCOMMAND", d, 1) | ||
62 | |||
63 | bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) | ||
64 | fetchcmd = fetchcmd.replace("${URI}", uri) | ||
65 | fetchcmd = fetchcmd.replace("${FILE}", ud.basename) | ||
66 | bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) | ||
67 | ret = os.system(fetchcmd) | ||
68 | if ret != 0: | ||
69 | return False | ||
70 | |||
71 | # Sanity check since wget can pretend it succeeded when it didn't | ||
72 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
73 | if not os.path.exists(ud.localpath): | ||
74 | bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath)) | ||
75 | return False | ||
76 | |||
77 | return True | ||
78 | |||
79 | localdata = data.createCopy(d) | ||
80 | data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) | ||
81 | data.update_data(localdata) | ||
82 | |||
83 | premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] | ||
84 | for (find, replace) in premirrors: | ||
85 | newuri = uri_replace(uri, find, replace, d) | ||
86 | if newuri != uri: | ||
87 | if fetch_uri(newuri, ud, localdata): | ||
88 | return True | ||
89 | |||
90 | if fetch_uri(uri, ud, localdata): | ||
91 | return True | ||
92 | |||
93 | # try mirrors | ||
94 | mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] | ||
95 | for (find, replace) in mirrors: | ||
96 | newuri = uri_replace(uri, find, replace, d) | ||
97 | if newuri != uri: | ||
98 | if fetch_uri(newuri, ud, localdata): | ||
99 | return True | ||
100 | |||
101 | raise FetchError(uri) | ||
102 | |||
103 | |||
104 | def checkstatus(self, uri, ud, d): | ||
105 | return self.go(uri, ud, d, True) | ||
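The nested fetch_uri() above works by textual substitution of ${URI} and ${FILE} into whichever of FETCHCOMMAND, RESUMECOMMAND or CHECKCOMMAND applies. A minimal sketch of that substitution; the template is a plausible FETCHCOMMAND value assumed for illustration, not necessarily what any particular configuration defines:

    # Sketch of the ${URI}/${FILE} substitution done in fetch_uri(); the template is assumed.
    fetchcmd = "wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
    uri      = "http://downloads.example.org/foo-1.0.tar.gz"
    basename = "foo-1.0.tar.gz"

    fetchcmd = fetchcmd.replace("${URI}", uri)
    fetchcmd = fetchcmd.replace("${FILE}", basename)
    print(fetchcmd)
    # -> wget -t 5 --passive-ftp -P ${DL_DIR} http://downloads.example.org/foo-1.0.tar.gz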