summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/fetch2
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py1538
-rw-r--r--bitbake/lib/bb/fetch2/bzr.py143
-rw-r--r--bitbake/lib/bb/fetch2/cvs.py171
-rw-r--r--bitbake/lib/bb/fetch2/git.py326
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py78
-rw-r--r--bitbake/lib/bb/fetch2/hg.py181
-rw-r--r--bitbake/lib/bb/fetch2/local.py116
-rw-r--r--bitbake/lib/bb/fetch2/osc.py135
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py198
-rw-r--r--bitbake/lib/bb/fetch2/repo.py98
-rw-r--r--bitbake/lib/bb/fetch2/sftp.py129
-rw-r--r--bitbake/lib/bb/fetch2/ssh.py127
-rw-r--r--bitbake/lib/bb/fetch2/svk.py97
-rw-r--r--bitbake/lib/bb/fetch2/svn.py189
-rw-r--r--bitbake/lib/bb/fetch2/wget.py97
15 files changed, 3623 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..451d104f67
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1538 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36if 'git' not in urlparse.uses_netloc:
37 urlparse.uses_netloc.append('git')
38from urlparse import urlparse
39import operator
40import bb.persist_data, bb.utils
41import bb.checksum
42from bb import data
43import bb.process
44import subprocess
45
46__version__ = "2"
47_checksum_cache = bb.checksum.FileChecksumCache()
48
49logger = logging.getLogger("BitBake.Fetcher")
50
class BBFetchException(Exception):
    """Base class for all BitBake fetcher exceptions.

    Keeps the human-readable message on the ``msg`` attribute and also
    forwards it to ``Exception`` so ``args`` is populated conventionally.
    """

    def __init__(self, message):
        # Subclasses and callers read .msg directly.
        self.msg = message
        super(BBFetchException, self).__init__(message)

    def __str__(self):
        # Show just the message rather than the default repr of args.
        return self.msg
59
class MalformedUrl(BBFetchException):
    """Raised when a URL cannot be parsed/interpreted at all."""

    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "The URL: '%s' is invalid and cannot be interpreted" % url)
        self.args = (url,)
67
class FetchError(BBFetchException):
    """General fetcher failure, optionally tied to a specific URL."""

    def __init__(self, message, url = None):
        msg = ("Fetcher failure for URL: '%s'. %s" % (url, message)
               if url else
               "Fetcher failure: %s" % message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)
78
class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        # Keep the offending checksum so callers can act on it; e.g. the
        # mirror handling passes it to rename_bad_checksum().
        self.checksum = checksum
        FetchError.__init__(self, message, url)
84
# Carries the same (message, url) payload as FetchError and needs no
# behaviour of its own; raised by verify_checksum().
class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
87
class UnpackError(BBFetchException):
    """Raised when unpacking a fetched source fails."""

    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
95
class NoMethodError(BBFetchException):
    """Raised when no registered fetch method supports the given URL."""

    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
103
class MissingParameterError(BBFetchException):
    """Raised when a fetch method needs a URL parameter that is absent."""

    def __init__(self, missing, url):
        self.url = url
        self.missing = missing
        BBFetchException.__init__(
            self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
112
class ParameterError(BBFetchException):
    """Raised when a URL carries invalid parameters and cannot be processed."""

    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
120
class NetworkAccess(BBFetchException):
    """Raised when network access is required but disabled (BB_NO_NETWORK)."""

    def __init__(self, url, cmd):
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
129
class NonLocalMethod(Exception):
    """Internal signal: a URL resolved to a non-local fetch method while
    only local methods were acceptable (see FetchData's localonly flag)."""

    def __init__(self):
        super(NonLocalMethod, self).__init__()
133
134
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
        * path_quoted (read/write)
          A URI quoted version of path
      * params (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    # Schemes that may appear in the "scheme:path" (no //) relative form.
    _relative_schemes = ['file', 'git']
    # Schemes that must never carry a network location; their "//" is
    # stripped before parsing so "file://foo" is treated as a path.
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.relative = False

        if not uri:
            return

        urlp = urlparse(uri)
        self.scheme = urlp.scheme

        # Convert URI to be relative
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            urlp = urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
            re.compile("^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        # Do support params even for URI schemes that Python's
        # urlparse doesn't support params for.
        path = ''
        param_str = ''
        if not urlp.params:
            # Pad with None so the unpack works when no ";params" suffix
            # is present.
            path, param_str = (list(urlp.path.split(";", 1)) + [None])[:2]
        else:
            path = urlp.path
            param_str = urlp.params

        self.path = urllib.unquote(path)

        if param_str:
            self.params = self._param_dict(param_str)

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._param_str)

    @property
    def _param_str(self):
        # Serialize params back into the ";key=value;..." suffix form.
        ret = ''
        for key, val in self.params.items():
            ret += ";%s=%s" % (key, val)
        return ret

    def _param_dict(self, param_str):
        # Parse a ";key=value;..." suffix into a dict; values may
        # themselves contain '=' (split on the first one only).
        parm = {}

        for keyval in param_str.split(";"):
            key, val = keyval.split("=", 1)
            parm[key] = val

        return parm

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute path implies a non-relative URI; anything else is
        # treated as relative.
        if re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        self.userinfo = username
        if self.password:
            self.userinfo += ":%s" % self.password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
326
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    Returns a (type, host, path, user, pswd, params) tuple where params
    is a dict built from the ';key=value' suffix entries. Raises
    MalformedUrl when the URL does not have the scheme://... shape.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # For non-file URLs the first '/' separates host from path; file URLs
    # have no host component at all.
    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            # Split on the first '=' only so parameter values may contain
            # '=' themselves (a plain split() raised ValueError here).
            s1, s2 = s.split('=', 1)
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
366
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).

    Inverse of decodeurl(); expects the same six-element sequence.
    """

    type, host, path, user, pswd, p = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))

    pieces = ['%s://' % type]
    # Credentials and host are meaningless for file:// URLs.
    if user and type != "file":
        auth = "%s" % user
        if pswd:
            auth += ":%s" % pswd
        pieces.append(auth + "@")
    if host and type != "file":
        pieces.append("%s" % host)
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    pieces.append("%s" % urllib.quote(path))
    url = ''.join(pieces)
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
395
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    """Map ud.url onto a mirror URL.

    uri_find is a URL whose decoded components are treated as regexes;
    uri_replace is the corresponding replacement template. replacements
    maps placeholder names (TYPE, HOST, PATH, BASENAME, MIRRORNAME) to
    their values for this URL. Returns the rewritten URL, or None when
    the pattern does not apply (or the result equals the original URL).
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    # Decoded tuple layout: 0=type, 1=host, 2=path, 3=user, 4=password, 5=params
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
                # Overwrite any specified replacement parameters
                for k in uri_replace_decoded[loc]:
                    for l in replacements:
                        uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                    result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                # Substitute placeholders first, then apply the regex.
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            # Component regex did not match: this mirror rule doesn't apply.
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
451
# Registry of available fetch method implementations; each fetcher module
# appends its handler here and FetchData picks the first one whose
# supports() returns True.
methods = []
# Cache of parsed url data (populated by code later in this file).
urldata_cache = {}
# Snapshot of the BB_URI_HEADREVS store taken by fetcher_init() before it
# clears the cache; consumed by fetcher_compare_revisions().
saved_headrevs = {}
455
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            # Keep a copy so fetcher_compare_revisions() can diff against it.
            bb.fetch2.saved_headrevs = revs.items()
        except Exception:
            # Best-effort: failing to snapshot the old revisions must not
            # stop the cache from being cleared. (Was a bare 'except:',
            # which also swallowed KeyboardInterrupt/SystemExit.)
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialize itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
481
def fetcher_parse_save(d):
    # Persist any checksum data gathered during this parse.
    _checksum_cache.save_extras(d)
484
def fetcher_parse_done(d):
    # Merge per-parse checksum data back into the main cache once parsing
    # has completed.
    _checksum_cache.save_merge(d)
487
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with current values and
    return True if any have changed, False otherwise.
    """

    # Both sources yield (key, value) pairs; convert to dicts so the
    # per-key lookups below work. (Previously these were left as pair
    # lists, so 'data2[key]' indexed a list with a tuple, and the loop
    # returned after examining only the first entry.)
    data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
    data2 = dict(bb.fetch2.saved_headrevs)

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
        else:
            logger.debug(2, "%s did not change", key)
    return changed
506
def mirror_from_string(data):
    """Parse a MIRRORS/PREMIRRORS-style value into [[find, replace], ...].

    The value may use literal '\\n' escapes as line separators; blank
    lines are skipped and each remaining line is whitespace-split.
    """
    text = (data or "").replace('\\n', '\n')
    mirrors = []
    for line in text.split('\n'):
        if line:
            mirrors.append(line.split())
    return mirrors
509
def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    """

    if not ud.method.supports_checksum(ud):
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if strict and ud.md5_expected is None and ud.sha256_expected is None:
            raise NoChecksumError('No checksum specified for %s, please add at least one to the recipe:\n'
                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                             (ud.localpath, ud.md5_name, md5data,
                              ud.sha256_name, sha256data), u)

        # Log missing sums so user can more easily add them
        if ud.md5_expected is None:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)

        if ud.sha256_expected is None:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)

    md5mismatch = ud.md5_expected != md5data
    sha256mismatch = ud.sha256_expected != sha256data

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if md5mismatch and ud.md5_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if sha256mismatch and ud.sha256_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, u, md5data)
572
573
def update_stamp(u, ud, d):
    """
    donestamp is file stamp indicating the whole fetching is done
    this function update the stamp after verifying the checksum
    """
    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        # First completion: only create the stamp once the checksums have
        # been verified (verify_checksum raises on mismatch).
        verify_checksum(u, ud, d)
        open(ud.donestamp, 'w').close()
589
def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    # NOTE(review): presumably used as a subprocess preexec_fn so children
    # get the default SIGPIPE behaviour - confirm at call sites.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
595
def get_autorev(d):
    """Return the placeholder revision string used for SRCREV="AUTOINC".

    Also marks the recipe as uncacheable unless the SRCREV policy is
    "cache", since the resolved revision may change between parses.
    """
    # only not cache src rev in autorev case
    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
        d.setVar('__BB_DONT_CACHE', '1')
    return "AUTOINC"
601
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
        # Truncate long (e.g. git) revisions to keep the version readable.
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    # (local renamed from 'format' to avoid shadowing the builtin)
    rev_format = d.getVar('SRCREV_FORMAT', True)
    if not rev_format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = ud.method.sortable_revision(scm, ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            # Each SCM's name placeholder in SRCREV_FORMAT is replaced by
            # its (truncated) revision.
            rev_format = rev_format.replace(name, rev)
    if seenautoinc:
        rev_format = "AUTOINC+" + rev_format

    return rev_format
649
def localpath(url, d):
    # Convenience wrapper: resolve the local download path for a single URL.
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)
653
def runfetchcmd(cmd, d, quiet = False, cleanup = None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Default to None rather than a mutable []: a shared default list
    # would persist between calls.
    if cleanup is None:
        cleanup = []

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD']

    for var in exportvars:
        val = d.getVar(var, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partial results before reporting the failure.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
712
def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set

    info is typically the command line about to touch the network; it is
    included in the NetworkAccess exception / debug log.
    """
    if d.getVar("BB_NO_NETWORK", True) == "1":
        raise NetworkAccess(url, info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
721
def build_mirroruris(origud, mirrors, ld):
    """Expand mirror patterns into concrete candidate URLs for origud.

    mirrors is a list of (find, replace) pairs (see mirror_from_string()).
    Returns parallel lists (uris, uds) of candidate URLs and their
    FetchData objects. Candidates are generated recursively, so mirrors
    of mirrors are considered too; duplicates and the original URL are
    skipped.
    """
    uris = []
    uds = []

    # Placeholder values usable inside mirror replacement templates.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(uri, ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                # Skip malformed mirror entries (not exactly two fields).
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue
            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            # Recurse so mirrors of this mirror are also candidates.
            adduri(newuri, newud, uris, uds)

    adduri(None, origud, uris, uds)

    return uris, uds
761
def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter

    Moves a download with a bad checksum aside (suffix is the offending
    checksum when called from the mirror handling) so that a retry can
    fetch a clean copy.
    """

    if ud.localpath is None:
        return

    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
    bb.utils.movefile(ud.localpath, new_localpath)
773
774
def try_mirror_url(newuri, origud, ud, ld, check = False):
    """Attempt one mirror candidate for origud.

    If check is set, only probe the mirror for existence. Returns False
    to mean "try another url"; any other value (including None) ends the
    search - a path/status on success, or None when the fetch produced a
    mirror tarball that still needs processing by the real fetcher.
    """
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            found = ud.method.checkstatus(newuri, ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
            ud.method.download(newuri, ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(newuri, ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            return None
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            os.symlink(ud.localpath, origud.localpath)
        update_stamp(newuri, origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            # Bad checksum from a mirror: move the download aside and let
            # the caller try the next candidate.
            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (newuri, origud.url))
            logger.warn(str(e))
            rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
837
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    origud is the FetchData of the original url we're trying to download
    mirrors is the list of mirrors we're going to try
    check, if set, only probes mirrors for existence instead of fetching
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(uri, origud, uds[index], ld, check)
        if ret != False:
            # None or any value ends the search (see try_mirror_url).
            return ret
    return None
856
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified

    Lookup order: the URL's own rev/tag parameter, then the variables
    SRCREV_<name>_pn-<PN>, SRCREV_<name>, SRCREV_pn-<PN>, SRCREV.
    """

    if 'rev' in ud.parm:
        return ud.parm['rev']

    if 'tag' in ud.parm:
        return ud.parm['tag']

    rev = None
    pn = d.getVar("PN", True)
    if name != '':
        rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True)
        if not rev:
            rev = d.getVar("SRCREV_%s" % name, True)
    if not rev:
        rev = d.getVar("SRCREV_pn-%s" % pn, True)
    if not rev:
        rev = d.getVar("SRCREV", True)
    if rev == "INVALID":
        # "INVALID" means no usable revision was provided; tell the user
        # which variable they need to set.
        var = "SRCREV_pn-%s" % pn
        if name != '':
            var = "SRCREV_%s_pn-%s" % (name, pn)
        raise FetchError("Please set %s to a valid value" % var, ud.url)
    if rev == "AUTOINC":
        rev = ud.method.latest_revision(ud.url, ud, d, name)

    return rev
890
891
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    # localonly: only local:// entries are of interest here.
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            ud.setup_localpath(d)
            f = ud.localpath
            if f.startswith(dl_dir):
                # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                if os.path.exists(f):
                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                else:
                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                    continue
            filelist.append(f)

    return " ".join(filelist)
918
919
def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns (path, checksum) pairs for a list of local files (sorted by
    checksum), caching the results as it proceeds. filelist entries may
    be plain files, glob patterns or directories (walked recursively).

    """

    def checksum_file(f):
        # Return the cached checksum for f, or None (with a warning) if
        # the file cannot be read.
        try:
            checksum = _checksum_cache.get_checksum(f)
        except OSError as e:
            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
            return None
        return checksum

    checksums = []
    for pth in filelist.split():
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
                checksum = checksum_file(f)
                if checksum:
                    checksums.append((f, checksum))
        elif os.path.isdir(pth):
            # Handle directories
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        checksums.append((fullpth, checksum))
        else:
            # Plain file entry. (The append now lives in this branch only:
            # previously it ran after the glob/directory branches as well,
            # re-adding the last checksum keyed by the pattern/dir path.)
            checksum = checksum_file(pth)
            if checksum:
                checksums.append((pth, checksum))

    checksums.sort(key=operator.itemgetter(1))
    return checksums
961
962
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Construction parses the URL, selects the FetchMethod that handles it and
    computes the .done stamp and .lock file locations. SRCREV resolution and
    the local download path can be completed later via setup_revisons() and
    setup_localpath().
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        # Decompose the (variable-expanded) URL into scheme, host, path,
        # credentials and the trailing ;key=value parameters.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        # Credentials may alternatively be supplied as URL parameters; they
        # only fill in values the URL itself did not provide.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # Checksum flag names are per-name ("<name>.md5sum") when the URL
        # carries a name parameter, otherwise the plain flag names.
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksums: a URL parameter wins; only the remote download
        # protocols fall back to the SRC_URI varflags.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

        # Comma-separated names used for multi-branch SRCREV handling.
        self.names = self.parm.get("name",'default').split(',')

        # First registered method whose supports() accepts the URL wins, so
        # the order of the module-level 'methods' list is significant.
        self.method = None
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # Legacy parameter spelling: "proto" is accepted but deprecated in
        # favour of "protocol".
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        # Give the chosen backend a chance to set up its own state
        # (typically ud.localfile and backend-specific attributes).
        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self.url, self, d)

        dldir = d.getVar("DL_DIR", True)
        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        else:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisons(self, d):
        # Resolve the revision to fetch for each name via SRCREV handling.
        # NOTE: the misspelling ("revisons") is part of the API used by the
        # fetcher backends; do not rename without updating all callers.
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        # Compute localpath lazily; no-op when it was already determined.
        if not self.localpath:
            self.localpath = self.method.localpath(self.url, self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        # A per-URL ;srcdate= parameter overrides the datastore variables.
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)

        # Prefer the recipe-specific SRCDATE_<pn>, then SRCDATE, then DATE.
        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1071
class FetchMethod(object):
    """Base class for 'fetch'ing data

    Concrete backends (git, svn, wget, ...) subclass this and override
    supports(), download() and friends; instances are registered in the
    module-level 'methods' list.
    """

    def __init__(self, urls = []):
        # NOTE(review): the 'urls' argument is ignored and the property is
        # always initialised empty — presumably callers set .urls later;
        # confirm before relying on the parameter.
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        # Base implementation supports nothing; subclasses override.
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        # Nor for glob patterns that expand to multiple files.
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        # Setter backing the 'urls' property.
        self.__urls = urls

    def getUrls(self):
        # Getter backing the 'urls' property.
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, url, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        # Default policy: fetch only when the local file is missing.
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        # Must be overridden by any backend that can actually download.
        raise NoMethodError(url)

    def unpack(self, urldata, rootdir, data):
        """
        Unpack urldata.localpath into rootdir.

        Dispatches on file extension to the appropriate shell command, or
        copies the file/directory as-is when no unpacking applies or the
        ;unpack=0 parameter is set. Honours the ;subdir=, ;dos=, ;extract=
        and url-type-specific destdir conventions. Raises UnpackError when
        the shell command fails.
        """
        iterate = False
        file = urldata.localpath

        # ;unpack= parameter may disable unpacking entirely.
        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        # efile: the decompressed output name for single-file compressors.
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None

        if unpack:
            # Select the unpack command purely by filename suffix.
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                # ;dos=1 asks unzip to auto-convert text-file line endings.
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    # ;extract= pulls a single member, then this method is
                    # re-invoked on it (see 'iterate' below).
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file

        if not unpack or not cmd:
            # No unpack command applies: fall back to copying into rootdir.
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
                    basepath = getattr(urldata, "basepath", None)
                    destdir = "."
                    if basepath and basepath.endswith("/"):
                        basepath = basepath.rstrip("/")
                    elif basepath:
                        basepath = os.path.dirname(basepath)
                    if basepath and basepath.find("/") != -1:
                        destdir = basepath[:basepath.rfind('/')]
                        destdir = destdir.strip('/')
                    if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
                        os.makedirs("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
                    #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
                else:
                    # The "destdir" handling was specifically done for FILESPATH
                    # items. So, only do so for file:// entries.
                    if urldata.type == "file" and urldata.path.find("/") != -1:
                        destdir = urldata.path.rsplit("/", 1)[0]
                    else:
                        destdir = "."
                    bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                    cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)

        if not cmd:
            return

        # Change to subdir before executing command
        save_cwd = os.getcwd();
        os.chdir(rootdir)
        if 'subdir' in urldata.parm:
            newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
            bb.utils.mkdirhier(newdir)
            os.chdir(newdir)

        # Run the command under the configured PATH; cwd is restored after.
        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        # Second pass for the ;extract= rpm case: unpack the extracted file.
        if iterate is True:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        # Base implementation cannot check anything; optimistically succeed.
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def latest_revision(self, url, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)

        # Persistent cache keyed by the backend-specific revision key.
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(url, ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(url, ud, d, name)
            return rev

    def sortable_revision(self, url, ud, d, name):
        """
        Return (True, revision) where the revision string sorts naturally.
        Backends whose revisions are not sortable override this.
        """
        latest_rev = self._build_revision(url, ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, url, ud, d, name):
        # Cache key: backend revision key plus the recipe name, so different
        # recipes using the same repo get independent cache entries.
        key = self._revision_key(url, ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
1302
class Fetch(object):
    """
    Front-end fetcher: holds the FetchData state for a set of URLs and
    drives download, checkstatus, unpack and clean across all of them.
    """
    def __init__(self, urls, d, cache = True, localonly = False):
        # The per-recipe urldata cache must not hold localonly-restricted
        # FetchData objects, so the two options are mutually exclusive.
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        # Reuse previously-parsed FetchData for this recipe file if allowed.
        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    # In localonly mode a non-local URL is recorded as
                    # unavailable rather than being an error.
                    if localonly:
                        self.ud[url] = None

        if fn and cache:
            urldata_cache[fn] = self.ud

    def localpath(self, url):
        """Return the expanded local path for url, parsing it on demand."""
        # NOTE(review): membership is tested against self.urls rather than
        # self.ud; preserved as-is since callers may rely on it.
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls = []):
        """
        Fetch all urls.

        For each URL: try premirrors, then the upstream source, then mirrors,
        all under a per-download lock file. Raises FetchError when every
        source fails.
        """
        if len(urls) == 0:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            lf = bb.utils.lockfile(ud.lockfile)

            try:
                # Restore the network setting in case a previous iteration
                # forced BB_NO_NETWORK for the premirror-only mode below.
                self.d.setVar("BB_NO_NETWORK", network)

                if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
                    # Already downloaded and stamped as done.
                    localpath = ud.localpath
                elif m.try_premirror(u, ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(u, ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(u, ud, self.d)
                        localpath = ud.localpath
                        # early checksum verify, so that if checksum mismatched,
                        # fetcher still have chance to fetch from mirror
                        update_stamp(u, ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                            firsterr = e
                        # Remove any incomplete fetch
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self.d, ud, mirrors)

                # A glob localpath cannot be existence-checked directly.
                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(u, ud, self.d)

            except BBFetchException as e:
                if isinstance(e, NoChecksumError):
                    logger.error("%s" % str(e))
                elif isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                bb.utils.unlockfile(lf)

    def checkstatus(self, urls = []):
        """
        Check all urls exist upstream
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(u, ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls = []):
        """
        Unpack all urls into the directory root
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            # Fix: this previously expanded self.localpath (a bound method),
            # which is never None, so the skip was dead code; the intent is
            # to skip URLs that resolved to no local file.
            if self.d.expand(ud.localpath) is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls = []):
        """
        Clean files that the fetcher gets or places
        """

        if len(urls) == 0:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                # Fix: 'd' was an undefined name here (NameError at runtime);
                # the datastore is held on the instance.
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            # Fix: 'self.localpath' (a bound method) was compared to None,
            # which is always False; check the urldata's localpath instead.
            if not ud.localfile or ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)
1509
1510from . import cvs
1511from . import git
1512from . import gitsm
1513from . import local
1514from . import svn
1515from . import wget
1516from . import svk
1517from . import ssh
1518from . import sftp
1519from . import perforce
1520from . import bzr
1521from . import hg
1522from . import osc
1523from . import repo
1524
# Register the concrete fetcher backends. Registration order matters:
# FetchData selects the first method whose supports() accepts a URL.
for _cls in (local.Local, wget.Wget, svn.Svn, git.Git, gitsm.GitSM,
             cvs.Cvs, svk.Svk, ssh.SSH, sftp.SFTP, perforce.Perforce,
             bzr.Bzr, hg.Hg, osc.Osc, repo.Repo):
    methods.append(_cls())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..5d9e5f907c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import runfetchcmd
34from bb.fetch2 import logger
35
class Bzr(FetchMethod):
    """Fetcher for bzr:// repositories; tars the checkout into DL_DIR."""

    def supports(self, url, ud, d):
        """Handle bzr URLs only."""
        return ud.type in ['bzr']

    def urldata_init(self, ud, d):
        """
        init bzr specific variable within url data
        """
        # Create paths to bzr checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        ud.setup_revisons(d)

        if not ud.revision:
            # Fix: FetchMethod.latest_revision() takes a mandatory 'name'
            # argument; the previous call omitted it and raised TypeError
            # whenever no SRCREV was configured. bzr URLs have a single
            # name (ud.names[0], 'default' unless set), and the bzr
            # _revision_key/_latest_revision implementations ignore it.
            ud.revision = self.latest_revision(ud.url, ud, d, ud.names[0])

        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildbzrcommand(self, ud, d, command):
        """
        Build up an bzr commandline based on ud
        command is "fetch", "update", "revno"
        """

        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = ud.parm.get('protocol', 'http')

        bzrroot = ud.host + ud.path

        options = []

        if command == "revno":
            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        else:
            if ud.revision:
                options.append("-r %s" % ud.revision)

            if command == "fetch":
                bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            elif command == "update":
                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid bzr command %s" % command, ud.url)

        return bzrcmd

    def download(self, loc, ud, d):
        """Fetch url"""

        # NOTE(review): the .bzr probe uses basename(ud.pkgdir) while the
        # chdir below uses basename(ud.path) — looks inconsistent; verify
        # against a repo whose path basename differs from its pkgdir.
        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            # Existing checkout: pull new revisions on top of it.
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", loc)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            # Fresh branch: remove any stale tree first.
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", loc)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

    def _revision_key(self, url, ud, d, name):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, url, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", url)

        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()

    def sortable_revision(self, url, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..0a672a33ef
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
33from bb.fetch2 import runfetchcmd
34
class Cvs(FetchMethod):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def urldata_init(self, ud, d):
        # A cvs URL must name the module to check out via ;module=.
        if not "module" in ud.parm:
            raise MissingParameterError("module", ud.url)
        ud.module = ud.parm["module"]

        ud.tag = ud.parm.get('tag', "")

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            # A tag pins the revision, so the date restriction is dropped.
            ud.date = ""

        # The norecurse/fullpath parameters are encoded into the tarball
        # name so differently-fetched checkouts do not collide.
        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

    def need_update(self, url, ud, d):
        # "now" means always refetch; otherwise fetch only when missing.
        if (ud.date == "now"):
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def download(self, loc, ud, d):
        """
        Check out (or update) the module from CVS and tar it into
        ud.localpath. Builds the CVSROOT from the URL's method/credentials
        and the optional CVS proxy settings.
        """

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        # ;rsh= selects the transport command for the :ext: method.
        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            # Local repository: CVSROOT is just the path.
            cvsroot = ud.path
        else:
            # Remote: :method[;proxy=...][;proxyport=...]:user[:pswd]@host:[port]path
            cvsroot = ":" + method
            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + loc)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup = [moddir])

        if not os.access(moddir, os.R_OK):
            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)

        # ;scmdata=keep preserves the CVS metadata in the tarball.
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            # Tar from pkgdir so the full localdir path is preserved.
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            # Tar only the leaf module directory.
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean CVS Files and tarballs """

        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)

        bb.utils.remove(pkgdir, True)
        bb.utils.remove(ud.localpath)
171
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..6175e4c7c9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,326 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports multiple branch fetching, with branches
15 separated by commas. In multiple branches case, the name option
16 must have the same number of names to match the branches, which is
17 used to specify the SRC_REV for the branch
18 e.g:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 and current revision may disappear from upstream repo. This option will
33 remind fetcher to preserve local cache carefully for future use.
34 The default value is "0", set rebaseable=1 for rebaseable git repo.
35
36- nocheckout
37 Don't checkout source code when unpacking. set this option for the recipe
38 who has its own routine to checkout code.
39 The default is "0", set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't checkout the source code
43 when unpacking. Set this option for the recipe who has its own routine to
44 checkout code and tracking branch requirements.
45 The default is "0", set bareclone=1 if needed.
46
47"""
48
49#Copyright (C) 2005 Richard Purdie
50#
51# This program is free software; you can redistribute it and/or modify
52# it under the terms of the GNU General Public License version 2 as
53# published by the Free Software Foundation.
54#
55# This program is distributed in the hope that it will be useful,
56# but WITHOUT ANY WARRANTY; without even the implied warranty of
57# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
58# GNU General Public License for more details.
59#
60# You should have received a copy of the GNU General Public License along
61# with this program; if not, write to the Free Software Foundation, Inc.,
62# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
63
64import os
65import bb
66from bb import data
67from bb.fetch2 import FetchMethod
68from bb.fetch2 import runfetchcmd
69from bb.fetch2 import logger
70
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        pass

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        # git sources are pinned by SRCREV, not by file checksums
        return False

    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            # a url with no host can only be a local repository
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = True

        # branch= may list one branch per name= entry; the counts must match
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
        ud.branches = {}
        for name in ud.names:
            branch = branches[ud.names.index(name)]
            ud.branches[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        # rebaseable repos always keep a mirror tarball (see comment below)
        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

        ud.setup_revisons(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a sha1 checksum/revision is translated into one
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    # treat the non-sha value as a branch/tag name to resolve
                    ud.branches[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]
        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
        ud.clonedir = os.path.join(gitdir, gitsrcname)

        ud.localfile = ud.clonedir

    def localpath(self, url, ud, d):
        return ud.clonedir

    def need_update(self, u, ud, d):
        # Fetch again when the clone is missing, any wanted revision is
        # absent from it, or a requested mirror tarball wasn't generated yet.
        if not os.path.exists(ud.clonedir):
            return True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], d):
                return True
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            return True
        return False

    def try_premirror(self, u, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], d):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            # the mirror tarball (if any) no longer matches the repo contents
            ud.repochanged = True

    def build_mirror_data(self, url, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.clonedir)
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        # subpath= limits the checkout to a subdirectory of the tree
        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir)
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        # shared (-s) no-checkout (-n) clone of the local mirror
        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        # use the configured git command (FETCHCMD_git) consistently,
        # matching every other invocation in this class
        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
        return True

    def clean(self, ud, d):
        """ clean the git directory """

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)

    def supports_srcrev(self):
        return True

    def _contains_ref(self, tag, d):
        # NOTE: assumes the current working directory is the clone dir;
        # callers chdir() there before calling us.
        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag)
        output = runfetchcmd(cmd, d, quiet=True)
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _revision_key(self, url, ud, d, name):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]

    def _latest_revision(self, url, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
        return output.split()[0]

    def _build_revision(self, url, ud, d, name):
        return ud.revisions[name]

    def checkstatus(self, uri, ud, d):
        # Probe the remote without fetching anything.
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, uri)
        try:
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except bb.fetch2.FetchError:
            # Fixed: bare "FetchError" is not imported into this module, so
            # the original except clause raised NameError instead of
            # reporting the url as unreachable.
            return False
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000000..572b637c9a
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,78 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git submodules implementation
5"""
6
7# Copyright (C) 2013 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import os
23import bb
24from bb import data
25from bb.fetch2.git import Git
26from bb.fetch2 import runfetchcmd
27from bb.fetch2 import logger
28
class GitSM(Git):
    """git fetcher variant (gitsm://) that also fetches submodules."""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitsm']

    def uses_submodules(self, ud, d):
        # A revision uses submodules iff it contains a .gitmodules file.
        for name in ud.names:
            try:
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
                return True
            except bb.fetch2.FetchError:
                # Fixed: runfetchcmd raises bb.fetch2.FetchError; the original
                # "except bb.fetch.FetchError" named a different class and so
                # never caught the failure, aborting fetches of revisions
                # without a .gitmodules file.
                pass
        return False

    def update_submodules(self, u, ud, d):
        # We have to convert bare -> full repo, do the submodule bit, then convert back
        tmpclonedir = ud.clonedir + ".tmp"
        gitdir = tmpclonedir + os.sep + ".git"
        bb.utils.remove(tmpclonedir, True)
        os.mkdir(tmpclonedir)
        os.rename(ud.clonedir, gitdir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        os.chdir(tmpclonedir)
        runfetchcmd("git reset --hard", d)
        runfetchcmd("git submodule init", d)
        runfetchcmd("git submodule update", d)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
        os.rename(gitdir, ud.clonedir,)
        bb.utils.remove(tmpclonedir, True)

    def download(self, loc, ud, d):
        Git.download(self, loc, ud, d)

        # once the bare mirror is current, pull in submodules if any
        os.chdir(ud.clonedir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            self.update_submodules(loc, ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        # re-register and check out submodules inside the unpacked tree
        os.chdir(ud.destdir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
            runfetchcmd("git submodule init", d)
            runfetchcmd("git submodule update", d)
78
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..b1c8675dd4
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,181 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
class Hg(FetchMethod):
    """Class to fetch from mercurial repositories"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with mercurial.
        """
        return ud.type in ['hg']

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

        # an explicit rev= parameter wins over SRCREV resolution
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        elif not ud.revision:
            ud.revision = self.latest_revision(ud.url, ud, d)

        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, url, ud, d):
        # "tip" is a moving target, so always refetch; otherwise only fetch
        # when the tarball for this revision isn't present yet.
        revTag = ud.parm.get('rev', 'tip')
        if revTag == "tip":
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
        if proto == "file":
            host = "/"
            ud.host = "localhost"

        if not ud.user:
            hgroot = host + ud.path
        else:
            hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

        # Don't specify revision for the fetch; clone the entire repo.
        # This avoids an issue if the specified revision is a tag, because
        # the tag actually exists in the specified revision + 1, so it won't
        # be available when used in any successive commands.
        if ud.revision and command != "fetch":
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            cmd = "%s pull" % (basecmd)
        elif command == "update":
            cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

        return cmd

    def download(self, loc, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

            # Even when we clone (fetch), we still need to update as hg's clone
            # won't checkout the specified revision if its on a branch
            updatecmd = self._buildhgcommand(ud, d, "update")
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            runfetchcmd(updatecmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # Fixed typo: the mercurial tag-tracking file is '.hgtags';
            # '.hgrags' excluded nothing and leaked .hgtags into the tarball.
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

    def _latest_revision(self, url, ud, d, name):
        """
        Compute tip revision for the url
        """
        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
        return output.strip()

    def _build_revision(self, url, ud, d, name):
        return ud.revision

    def _revision_key(self, url, ud, d, name):
        """
        Return a unique key for the url
        """
        return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..58bbe20327
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,116 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import urllib
30import bb
31import bb.utils
32from bb import data
33from bb.fetch2 import FetchMethod, FetchError
34from bb.fetch2 import logger
35
class Local(FetchMethod):
    """Fetcher for file:// urls; the source is already on the local machine,
    so "fetching" means locating the file in the configured search paths."""

    def supports(self, url, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
        return urldata.type in ['file']

    def urldata_init(self, ud, d):
        # We don't set localfile as for this fetcher the file is already local!
        # Strip the scheme and any ;parameters, then undo %-escapes to get a
        # plain filesystem path (possibly relative, possibly containing '*').
        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
        ud.basename = os.path.basename(ud.decodedurl)
        ud.basepath = ud.decodedurl
        return

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        # Search order for relative paths: each FILESPATH entry, then
        # FILESDIR, then (for '*' globs) the first existing FILESPATH dir.
        path = urldata.decodedurl
        newpath = path
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                    # NOTE(review): this join is returned without an existence
                    # check unless the glob branch below replaces it.
                    newpath = os.path.join(filesdir, path)
            if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
                # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
                newpath = bb.utils.which(filespath, ".")
                logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return newpath
        # Absolute path that doesn't exist yet: fall back to DL_DIR and make
        # sure the target directory exists so a later download can land there.
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            return dldirfile
        return newpath

    def need_update(self, url, ud, d):
        # Globs can't be meaningfully re-checked; otherwise an "update" is
        # needed only when the file isn't present at all.
        if url.find("*") != -1:
            return False
        if os.path.exists(ud.localpath):
            return False
        return True

    def download(self, url, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        # If a checksum was expected but the file was never found, report
        # every location that was searched to help the user.
        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
            locations = []
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                locations = filespath.split(":")
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                locations.append(filesdir)
            locations.append(d.getVar("DL_DIR", True))

            msg = "Unable to find file " + url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
            raise FetchError(msg)

        return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of the url
        """
        if urldata.localpath.find("*") != -1:
            logger.info("URL %s looks like a glob and was therefore not checked.", url)
            return True
        if os.path.exists(urldata.localpath):
            return True
        return False

    def clean(self, urldata, d):
        # Nothing to clean: the file was never copied anywhere.
        return
116
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..1a3a7bb56b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
import os
import sys
import logging
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import MissingParameterError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
18
class Osc(FetchMethod):
    """Class to fetch a module or modules from Opensuse build server
    repositories."""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with osc.
        """
        return ud.type in ['osc']

    def urldata_init(self, ud, d):
        """Set up osc-specific fields (module, checkout dirs, revision) on ud."""
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        # An explicit rev= parameter wins; otherwise fall back to the SRCREV
        # helper, or to no revision at all.
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)

    def _buildosccommand(self, ud, d, command):
        """
        Build up an osc commandline based on ud
        command is "fetch" or "update"
        """

        basecmd = data.expand('${FETCHCMD_osc}', d)

        options = []

        # every invocation writes and uses a fresh config file
        config = "-c %s" % self.generate_config(ud, d)

        if ud.revision:
            options.append("-r %s" % ud.revision)

        coroot = self._strip_leading_slashes(ud.path)

        if command == "fetch":
            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
        elif command == "update":
            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
        else:
            raise FetchError("Invalid osc command %s" % command, ud.url)

        return osccmd

    def download(self, loc, ud, d):
        """
        Fetch url
        """

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # NOTE(review): ud.path is normally absolute, so os.path.join here
        # discards the ${OSCDIR} prefix and this tests /<path>/<module>, not
        # the real checkout dir (ud.moddir) -- verify against a live config.
        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update "+ loc)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", oscupdatecmd)
            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
            runfetchcmd(oscupdatecmd, d)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
            runfetchcmd(oscfetchcmd, d)

        # NOTE(review): string concatenation (pkgdir + path) is deliberate --
        # os.path.join(pkgdir, path) would drop pkgdir for an absolute path.
        os.chdir(os.path.join(ud.pkgdir + ud.path))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        # osc revisions come from the rev= url parameter, not SRCREV
        return False

    def generate_config(self, ud, d):
        """
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

        # 'with' guarantees the file is closed even if a write fails
        with open(config_path, 'w') as f:
            f.write("[general]\n")
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)
            f.write("user = %s\n" % ud.parm["user"])
            f.write("pass = %s\n" % ud.parm["pswd"])

        return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..fc4074d5a3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,198 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import subprocess
31import logging
32import bb
33from bb import data
34from bb.fetch2 import FetchMethod
35from bb.fetch2 import FetchError
36from bb.fetch2 import logger
37from bb.fetch2 import runfetchcmd
38
class Perforce(FetchMethod):
    """Fetch a tree from a Perforce (p4) depot and tar it up in DL_DIR."""

    def supports(self, url, ud, d):
        """Check to see if a given url can be fetched with perforce."""
        return ud.type in ['p4']

    def doparse(url, d):
        """
        Split a p4:// url into (host, path, user, pswd, parm).

        Credentials may be embedded as user:pswd:host:port before '@';
        otherwise they come from the P4PORT variable. ';key=value' pairs
        after the depot path populate parm, and parm["cset"] is filled in
        with the changeset this fetch resolves to.
        """
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@")
        if delim != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            keys = []
            values = []
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm
    doparse = staticmethod(doparse)

    def getcset(d, depot, host, user, pswd, parm):
        """
        Return the changeset to fetch: the cached parm["cset"] if present,
        otherwise the latest change for the depot (optionally pinned by the
        'revision'/'label' parameters or the P4DATE variable). Returns -1
        when 'p4 changes' produces no output.
        """
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = data.getVar("P4DATE", d, True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        # 'p4 changes' output looks like "Change <num> on <date> ..." —
        # the second field is the changeset number.
        return cset.split(' ')[1]
    getcset = staticmethod(getcset)

    def urldata_init(self, ud, d):
        """Derive the local tarball name from the label, or host/path/changeset."""
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        # If a label is specified, we use that as our filename
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return

        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

    def download(self, loc, ud, d):
        """
        Fetch urls: list the files at the pinned changeset with 'p4 files',
        print each one into a temporary directory, then tar them up into
        ud.localpath.
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, True)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)

        count = 0

        # Renamed from 'file'/'list' which shadowed Python builtins.
        for p4entry in p4file:
            fields = p4entry.split()

            # Skip files marked deleted at this changeset.
            if fields[2] == "delete":
                continue

            dest = fields[0][len(path)+1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
            count = count + 1

        if count == 0:
            # BUGFIX: logger.error() was called without a message, which
            # raises TypeError and masked the FetchError below.
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch", loc)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..8300da8c5a
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch2 import FetchMethod
30from bb.fetch2 import runfetchcmd
31
class Repo(FetchMethod):
    """Class to fetch a module or modules from repo (git) repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """
        return ud.type in ["repo"]

    def urldata_init(self, ud, d):
        """
        We don't care about the git rev of the manifests repository, but
        we do care about the manifest to use. The default is "default".
        We also care about the branch or tag to be used. The default is
        "master".
        """

        ud.proto = ud.parm.get('protocol', 'git')
        ud.branch = ud.parm.get('branch', 'master')
        ud.manifest = ud.parm.get('manifest', 'default.xml')
        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)

    def download(self, loc, ud, d):
        """Fetch url: repo init + repo sync, then tar the checkout into DL_DIR."""

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            # BUGFIX: this module never imported 'logger', so the bare name
            # raised NameError; reach it through bb.fetch2 (already imported
            # transitively and used below for check_network_access).
            bb.fetch2.logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            # Build the command once instead of duplicating the format string.
            init_cmd = "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path)
            bb.fetch2.check_network_access(d, init_cmd, ud.url)
            runfetchcmd(init_cmd, d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

    def supports_srcrev(self):
        return False

    def _build_revision(self, url, ud, d):
        return ud.manifest

    def _want_sortable_revision(self, url, ud, d):
        return False
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..5fbbcfdd90
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake SFTP Fetch implementation
5
6Class for fetching files via SFTP. It tries to adhere to the (now
7expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
8Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
9(SSH)" (SECSH URI).
10
11It uses SFTP (as to adhere to the SECSH URI specification). It only
12supports key based authentication, not password. This class, unlike
13the SSH fetcher, does not support fetching a directory tree from the
14remote.
15
16 http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
17 https://www.iana.org/assignments/uri-schemes/prov/sftp
18 https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
19
Please note that '/' is used as host path separator, and not ":"
as you may be used to from the scp/sftp commands. You can use a
~ (tilde) to specify a path relative to your home directory.
(The /~user/ syntax, for specifying a path relative to another
user's home directory, is not supported.) Note that the tilde must
still follow the host path separator ("/"). See examples below.
26
27Example SRC_URIs:
28
29SRC_URI = "sftp://host.example.com/dir/path.file.txt"
30
31A path relative to your home directory.
32
33SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
34
You can also specify a username (specifying a password in the
36URI is not supported, use SSH keys to authenticate):
37
38SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
39
40"""
41
42# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
43#
44# Based in part on bb.fetch2.wget:
45# Copyright (C) 2003, 2004 Chris Larson
46#
47# This program is free software; you can redistribute it and/or modify
48# it under the terms of the GNU General Public License version 2 as
49# published by the Free Software Foundation.
50#
51# This program is distributed in the hope that it will be useful,
52# but WITHOUT ANY WARRANTY; without even the implied warranty of
53# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
54# GNU General Public License for more details.
55#
56# You should have received a copy of the GNU General Public License along
57# with this program; if not, write to the Free Software Foundation, Inc.,
58# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
59#
60# Based on functions from the base bb module, Copyright 2003 Holger Schurig
61
62import os
63import bb
64import urllib
65import commands
66from bb import data
67from bb.fetch2 import URI
68from bb.fetch2 import FetchMethod
69from bb.fetch2 import runfetchcmd
70
71
class SFTP(FetchMethod):
    """Fetch urls via the 'sftp' command (key-based authentication only)."""

    def supports(self, url, ud, d):
        """Accept only sftp:// urls."""
        return ud.type in ['sftp']

    def recommends_checksum(self, urldata):
        """Remote files are opaque to us, so checksums are recommended."""
        return True

    def urldata_init(self, ud, d):
        """Work out the local filename, honouring the downloadfilename parameter."""
        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a " +
                "git repository using ssh, you need to use the " +
                "git:// prefix with protocol=ssh", ud.url)

        ud.basename = ud.parm.get('downloadfilename', os.path.basename(ud.path))
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

    def download(self, uri, ud, d):
        """Fetch the remote file into DL_DIR with a single sftp invocation."""
        parsed = URI(uri)
        basecmd = 'sftp -oPasswordAuthentication=no'

        # sftp takes the port as a separate -P flag, not as part of the URI.
        portarg = ''
        if parsed.port:
            portarg = '-P %d' % parsed.port
            parsed.port = None

        destfile = os.path.join(data.getVar('DL_DIR', d, True), ud.localfile)

        userprefix = ''
        if parsed.userinfo:
            userprefix = parsed.userinfo + '@'

        remotepath = parsed.path
        # A '/~/' prefix means "relative to the remote home directory"
        # (SECSH URI draft); sftp wants the plain relative path.
        if remotepath.startswith('/~/'):
            remotepath = remotepath[3:]

        remote = '%s%s:%s' % (userprefix, parsed.hostname, remotepath)

        cmd = '%s %s %s %s' % (basecmd, portarg, commands.mkarg(remote),
                               commands.mkarg(destfile))

        bb.fetch2.check_network_access(d, cmd, uri)
        runfetchcmd(cmd, d)
        return True
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..8b5acbf6db
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13 Please note that '/' is used as host, path separator not ':' as you may
14 be used to, also '~' can be used to specify user HOME, but again after '/'
15
16 Example SRC_URI:
17 SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
18 SRC_URI = "ssh://user@host.example.com/~/file.txt"
19'''
20
21# Copyright (C) 2006 OpenedHand Ltd.
22#
23#
24# Based in part on svk.py:
25# Copyright (C) 2006 Holger Hans Peter Freyther
26# Based on svn.py:
27# Copyright (C) 2003, 2004 Chris Larson
28# Based on functions from the base bb module:
29# Copyright 2003 Holger Schurig
30#
31#
32# This program is free software; you can redistribute it and/or modify
33# it under the terms of the GNU General Public License version 2 as
34# published by the Free Software Foundation.
35#
36# This program is distributed in the hope that it will be useful,
37# but WITHOUT ANY WARRANTY; without even the implied warranty of
38# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
39# GNU General Public License for more details.
40#
41# You should have received a copy of the GNU General Public License along
42# with this program; if not, write to the Free Software Foundation, Inc.,
43# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44
45import re, os
46from bb import data
47from bb.fetch2 import FetchMethod
48from bb.fetch2 import FetchError
49from bb.fetch2 import logger
50from bb.fetch2 import runfetchcmd
51
52
# URI shape accepted by the SSH fetcher (SECSH draft style), e.g.
# ssh://user:pass@host.example.com:22/~/dir/file.txt;param=value
# The named groups are consumed by SSH.urldata_init() and SSH.download().
__pattern__ = re.compile(r'''
    \s*                 # Skip leading whitespace
    ssh://              # scheme
    (                   # Optional username/password block
        (?P<user>\S+)       # username
        (:(?P<pass>\S+))?   # colon followed by the password (optional)
    )?
    (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
    @
    (?P<host>\S+?)      # non-greedy match of the host
    (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
    /
    (?P<path>[^;]+)     # path on the remote system, may be absolute or relative,
                        # and may include the use of '~' to reference the remote home
                        # directory
    (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
    $
''', re.VERBOSE)
71
class SSH(FetchMethod):
    '''Class to fetch a module or modules via Secure Shell'''

    def supports(self, url, urldata, d):
        """Accept any url matching the ssh:// pattern above."""
        return __pattern__.match(url) != None

    def supports_checksum(self, urldata):
        # scp can fetch whole trees, for which a single checksum is meaningless.
        return False

    def urldata_init(self, urldata, d):
        """Set localpath to DL_DIR/<basename of the remote path>."""
        # BUGFIX: this module does not import 'bb' at module level
        # ('from bb import data' does not bind the name 'bb'), so the
        # bb.fetch2.ParameterError reference raised NameError instead of
        # the intended error. Import it locally.
        import bb
        if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a git " +
                "repository using ssh, you need to use " +
                "git:// prefix with protocol=ssh", urldata.url)
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, url, urldata, d):
        """Fetch the remote file or tree into DL_DIR with 'scp -B -r'."""
        # See urldata_init: 'bb' is not available at module scope here.
        import bb
        import commands

        dldir = d.getVar('DL_DIR', True)

        m = __pattern__.match(url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        if port:
            portarg = '-P %s' % port
        else:
            portarg = ''

        # Build the scp source spec: [user[:password]@]host:path
        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path

        cmd = 'scp -B -r %s %s %s/' % (
            portarg,
            commands.mkarg(fr),
            commands.mkarg(dldir)
        )

        bb.fetch2.check_network_access(d, cmd, urldata.url)

        runfetchcmd(cmd, d)
127
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
new file mode 100644
index 0000000000..ee3823f845
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svk.py
@@ -0,0 +1,97 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6This implementation is for svk. It is based on the svn implementation
7
8"""
9
10# Copyright (C) 2006 Holger Hans Peter Freyther
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
class Svk(FetchMethod):
    """Class to fetch a module or modules from svk repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svk.
        """
        return ud.type in ['svk']

    def urldata_init(self, ud, d):
        """Require the 'module' parameter and derive the local tarball name."""
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)
        else:
            ud.module = ud.parm["module"]

        ud.revision = ud.parm.get('rev', "")

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

    def need_update(self, url, ud, d):
        """Refetch when pinned to "now" or when the local tarball is missing."""
        if ud.date == "now":
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def download(self, loc, ud, d):
        """Fetch urls: check out with svk into a temp dir and tar the result."""

        svkroot = ud.host + ud.path

        # Check out by date by default; an explicit revision overrides it.
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            # BUGFIX: logger.error() without a message raises TypeError,
            # which masked the FetchError below; the exception already
            # carries the diagnostic, so the bare call is dropped.
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup = [tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..9a779d2448
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,189 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import MissingParameterError
34from bb.fetch2 import runfetchcmd
35from bb.fetch2 import logger
36
class Svn(FetchMethod):
    """Class to fetch a module or modules from svn repositories"""

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svn.
        """
        return ud.type in ['svn']

    def urldata_init(self, ud, d):
        """
        init svn specific variable within url data
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.basecmd = d.getVar('FETCHCMD_svn', True)

        ud.module = ud.parm["module"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']

        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildsvncommand(self, ud, d, command):
        """
        Build up an svn commandline based on ud
        command is "fetch", "update", "info"
        """

        proto = ud.parm.get('protocol', 'svn')

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in ud.parm:
            svn_rsh = ud.parm["rsh"]

        svnroot = ud.host + ud.path

        options = []

        options.append("--no-auth-cache")

        if ud.user:
            options.append("--username %s" % ud.user)

        if ud.pswd:
            options.append("--password %s" % ud.pswd)

        if command == "info":
            svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
        else:
            suffix = ""
            if ud.revision:
                options.append("-r %s" % ud.revision)
                suffix = "@%s" % (ud.revision)

            if command == "fetch":
                svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
            elif command == "update":
                svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
            else:
                raise FetchError("Invalid svn command %s" % command, ud.url)

        if svn_rsh:
            # BUGFIX: Subversion reads the ssh tunnel command from the
            # SVN_SSH environment variable; the previous "svn_RSH" name
            # was ignored by the svn client, so the rsh parameter had
            # no effect.
            svncmd = "SVN_SSH=\"%s\" %s" % (svn_rsh, svncmd)

        return svncmd

    def download(self, loc, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + loc)
            # update sources there
            os.chdir(ud.moddir)
            # We need to attempt to run svn upgrade first in case its an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean SVN specific files and dirs """

        bb.utils.remove(ud.localpath)
        bb.utils.remove(ud.moddir, True)

    def supports_srcrev(self):
        return True

    def _revision_key(self, url, ud, d, name):
        """
        Return a unique key for the url
        """
        return "svn:" + ud.moddir

    def _latest_revision(self, url, ud, d, name):
        """
        Return the latest upstream revision number
        """
        # Pass ud.url for a useful diagnostic, consistent with every
        # other check_network_access call in this file.
        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "info"), ud.url)

        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)

        revision = None
        for line in output.splitlines():
            if "Last Changed Rev" in line:
                revision = line.split(":")[1].strip()

        return revision

    def sortable_revision(self, url, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(url, ud, d)

    def _build_revision(self, url, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..131016ce89
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,97 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch2 import FetchMethod
34from bb.fetch2 import FetchError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def recommends_checksum(self, urldata):
        # Remote files can change silently; recipes should pin checksums.
        return True

    def urldata_init(self, ud, d):
        # Guard against a common misconfiguration: http urls pointing at
        # git repositories belong to the git fetcher instead.
        if 'protocol' in ud.parm:
            if ud.parm['protocol'] == 'git':
                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        # 'downloadfilename' overrides the name derived from the URL path.
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        # Unquote %xx escapes so the local file gets a readable name.
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls

        With checkonly=True only a 'wget --spider' existence check is run
        (used by checkstatus below); nothing is written to DL_DIR.
        """

        basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

        # When the local name differs from the remote one, tell wget the
        # explicit output path (and make sure its directory exists).
        if not checkonly and 'downloadfilename' in ud.parm:
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            basecmd += " -O " + dldir + os.sep + ud.localfile

        # Pick the command template: spider check, resume of a partial
        # download, or a fresh fetch.
        if checkonly:
            fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

        # Strip any ';param=value' suffix before handing the url to wget.
        uri = uri.split(";")[0]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True

    def checkstatus(self, uri, ud, d):
        # A spider-only pass of download(): verifies the url exists
        # without downloading it.
        return self.download(uri, ud, d, True)