summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/fetch2
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py1575
-rw-r--r--bitbake/lib/bb/fetch2/bzr.py143
-rw-r--r--bitbake/lib/bb/fetch2/cvs.py171
-rw-r--r--bitbake/lib/bb/fetch2/git.py355
-rw-r--r--bitbake/lib/bb/fetch2/gitannex.py76
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py126
-rw-r--r--bitbake/lib/bb/fetch2/hg.py187
-rw-r--r--bitbake/lib/bb/fetch2/local.py116
-rw-r--r--bitbake/lib/bb/fetch2/osc.py135
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py194
-rw-r--r--bitbake/lib/bb/fetch2/repo.py98
-rw-r--r--bitbake/lib/bb/fetch2/sftp.py129
-rw-r--r--bitbake/lib/bb/fetch2/ssh.py127
-rw-r--r--bitbake/lib/bb/fetch2/svn.py191
-rw-r--r--bitbake/lib/bb/fetch2/wget.py106
15 files changed, 3729 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..5a03a0e46e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1575 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
43__version__ = "2"
44_checksum_cache = bb.checksum.FileChecksumCache()
45
46logger = logging.getLogger("BitBake.Fetcher")
47
class BBFetchException(Exception):
    """Root of the fetcher exception hierarchy; carries a message string."""

    def __init__(self, message):
        Exception.__init__(self, message)
        # Keep the message directly accessible for __str__ and callers.
        self.msg = message

    def __str__(self):
        return self.msg
56
class MalformedUrl(BBFetchException):
    """Raised when a URL cannot be parsed at all."""

    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "The URL: '%s' is invalid and cannot be interpreted" % url)
        self.args = (url,)
64
class FetchError(BBFetchException):
    """General fetcher failure, optionally tied to a specific URL."""

    def __init__(self, message, url = None):
        self.url = url
        if url:
            full = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            full = "Fetcher failure: %s" % message
        BBFetchException.__init__(self, full)
        self.args = (message, url)
75
class ChecksumError(FetchError):
    """Raised when a downloaded file fails checksum verification."""

    def __init__(self, message, url = None, checksum = None):
        FetchError.__init__(self, message, url)
        # Keep the offending checksum so callers can quarantine the file.
        self.checksum = checksum
81
# Deliberately no extra state: the FetchError message/url are sufficient.
class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
84
class UnpackError(BBFetchException):
    """Raised when unpacking an already-fetched source fails."""

    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(
            self, "Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
92
class NoMethodError(BBFetchException):
    """Raised when no registered fetch method understands the given URL."""

    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(
            self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
100
class MissingParameterError(BBFetchException):
    """Raised when a fetch method needs a URL parameter that was not given."""

    def __init__(self, missing, url):
        self.url = url
        # Record which parameter was absent for programmatic inspection.
        self.missing = missing
        BBFetchException.__init__(
            self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
109
class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)
117
class NetworkAccess(BBFetchException):
    """Raised when network access is required but disabled (BB_NO_NETWORK)."""

    def __init__(self, url, cmd):
        self.url = url
        # The shell command that would have touched the network.
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
126
class NonLocalMethod(Exception):
    """Internal control-flow marker: the URL's fetch method is not local.

    The original defined an explicit no-argument __init__ that only called
    Exception.__init__(self); that added nothing over the inherited
    behaviour, so it has been removed. Raising NonLocalMethod() behaves
    exactly as before.
    """
130
131
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urlparse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        if not self.scheme in urlparse.uses_netloc:
            # BUGFIX: this previously appended the scheme to
            # urlparse.uses_params, which has no effect on netloc
            # parsing, so the reparse below changed nothing.  Appending
            # to uses_netloc is what actually makes urlparse populate
            # hostname/port for custom schemes.
            urlparse.uses_netloc.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urlparse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile("^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        # Store the path unquoted; quoting happens on output.
        self.path = urllib.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        ret = {}
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute path implies a non-relative URI.
        if re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
338
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    Raises MalformedUrl if the URL (or one of its ;key=value parameters)
    cannot be interpreted.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # Everything after the first '/' is the path, except for file:// URLs
    # where the whole location is the (possibly relative) path.
    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            # BUGFIX: the original used s.split('=') with no maxsplit,
            # which raised an unhandled ValueError for values containing
            # '=' (e.g. rev=foo=bar) or for parameters with no '=' at
            # all.  Split once and report a malformed URL explicitly.
            if '=' not in s:
                raise MalformedUrl(url)
            s1, s2 = s.split('=', 1)
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
378
def encodeurl(decoded):
    """Reassemble a URL string from the token tuple
    (scheme, network location, path, user, password, parameters)."""

    type, host, path, user, pswd, p = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))

    pieces = ['%s://' % type]
    if user and type != "file":
        pieces.append("%s" % user)
        if pswd:
            pieces.append(":%s" % pswd)
        pieces.append("@")
    if host and type != "file":
        pieces.append("%s" % host)
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    pieces.append("%s" % urllib.quote(path))
    if p:
        for parm in p:
            pieces.append(";%s=%s" % (parm, p[parm]))

    return ''.join(pieces)
407
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    """Map ud.url onto a mirror URL using the (uri_find, uri_replace) pair.

    Each decoded component of uri_find is treated as a regex against the
    corresponding component of ud.url; uri_replace supplies the replacement,
    after substituting the keys of 'replacements' (TYPE/HOST/PATH/...).
    Returns the rewritten URL string, or None if the URL does not match or
    the rewrite would be a no-op.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    # Components: 0=type, 1=host, 2=path, 3=user, 4=pswd, 5=params dict.
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                # Append the original basename if the rewritten path lost it,
                # so the mirror URL still points at the same file name.
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
463
# Registry of fetch method implementations; the individual fetcher modules
# (git, svn, wget, ...) append their instances here.
methods = []
# Cache of per-URL fetch data — presumably keyed by URL and used by the
# Fetch class defined later in this file; verify against its usage.
urldata_cache = {}
# Snapshot of previous SCM head revisions, taken by fetcher_init() before
# the BB_URI_HEADREVS store is cleared (read by fetcher_compare_revisions()).
saved_headrevs = {}
467
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.

    Applies the BB_SRCREV_POLICY ("cache" or "clear", default "clear"),
    initialises the file checksum cache and gives each registered fetch
    method a chance to initialise itself.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            # Keep a snapshot for fetcher_compare_revisions() before clearing.
            bb.fetch2.saved_headrevs = revs.items()
        except Exception:
            # Best effort only — the snapshot is purely informational.
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
493
def fetcher_parse_save(d):
    # Persist any file checksums newly computed during this parse.
    _checksum_cache.save_extras(d)
496
def fetcher_parse_done(d):
    # Merge per-parse checksum data back into the shared cache at the
    # end of parsing.
    _checksum_cache.save_merge(d)
499
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistant cache with current values and
    return true/false on whether they've changed.
    """
    # NOTE(review): 'data' is the result of .items() and 'data2' is the
    # snapshot taken in fetcher_init(); indexing them as data[key] /
    # data2[key] only works if persist_data returns a mapping-like object
    # here rather than a list of pairs — confirm against bb.persist_data.
    # Also note 'changed' is assigned but never read (the function returns
    # immediately on the first difference).

    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    changed = False
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            changed = True
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
518
def mirror_from_string(data):
    """Split a MIRRORS-style string into [regex, replacement] pairs.

    Literal '\\n' sequences act as line separators; blank lines are skipped.
    """
    lines = (data or "").replace('\\n', '\n').split('\n')
    return [line.split() for line in lines if line]
521
def verify_checksum(ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    """

    if not ud.method.supports_checksum(ud):
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if strict and not (ud.md5_expected or ud.sha256_expected):
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        # Log missing sums so user can more easily add them
        if not ud.md5_expected:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)

        if not ud.sha256_expected:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.  (The separate md5mismatch/sha256mismatch flags of
    # the original were redundant — a sum can only be reported as wrong when
    # it was specified — so the two tests are folded together; the stray
    # trailing semicolons are gone too.)
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
585
586
def update_stamp(ud, d):
    """
    donestamp is file stamp indicating the whole fetching is done
    this function update the stamp after verifying the checksum
    """
    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except (IOError, OSError):
            # Errors aren't fatal here.  Narrowed from a bare except so
            # that KeyboardInterrupt/SystemExit are no longer swallowed.
            pass
    else:
        # First time: only stamp once the checksums have been verified.
        verify_checksum(ud, d)
        open(ud.donestamp, 'w').close()
602
def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    # NOTE(review): presumably passed as a subprocess preexec_fn so the
    # child resets SIGPIPE before exec — confirm against callers.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
608
def get_autorev(d):
    """Return the magic "AUTOINC" revision marker.

    Unless SRCREV caching is explicitly enabled, mark the recipe as
    uncacheable, since an automatic revision may change between parses.
    """
    policy = d.getVar('BB_SRCREV_POLICY', True)
    if policy != "cache":
        d.setVar('__BB_DONT_CACHE', '1')
    return "AUTOINC"
614
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    # Collect only the URLs whose fetch method can supply a source revision.
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    # Common case: a single SCM with a single name — use its revision
    # directly, truncated to 10 characters.
    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = ud.method.sortable_revision(ud, d, name)
            # One AUTOINC anywhere marks the whole composed version.
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            # Substitute each SCM name placeholder in SRCREV_FORMAT with
            # its (truncated) revision.
            format = format.replace(name, rev)
    if seenautoinc:
        format = "AUTOINC+" + format

    return format
662
def localpath(url, d):
    # Convenience wrapper: resolve a single URL to its local download path
    # via a throwaway Fetch instance.
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)
666
def runfetchcmd(cmd, d, quiet = False, cleanup = None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure

    Note: 'quiet' is accepted for interface compatibility but is not used
    in this implementation.
    """

    # Avoid the mutable-default-argument pitfall; behaviour is unchanged
    # for callers that pass their own list.
    if cleanup is None:
        cleanup = []

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD']

    for var in exportvars:
        val = d.getVar(var, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partial results before reporting the failure.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
725
def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if d.getVar("BB_NO_NETWORK", True) == "1":
        raise NetworkAccess(url, info)
    logger.debug(1, "Fetcher accessed the network with the command %s" % info)
734
def build_mirroruris(origud, mirrors, ld):
    """Expand the mirror list for origud into parallel lists of candidate
    URIs and their FetchData objects.

    Mirror entries that fail uri_replace() or whose FetchData cannot be set
    up are skipped.  Each successful mapping is itself re-run through the
    mirror list (recursion in adduri), so mirror-of-mirror chains are found.
    """
    uris = []
    uds = []

    # Substitution tokens available in mirror replacement patterns.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                # Malformed mirror line (not exactly two fields) — ignore.
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue
            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            # Recurse so that mirrors of this mirror are also considered.
            adduri(newud, uris, uds)

    adduri(origud, uris, uds)

    return uris, uds
774
def rename_bad_checksum(ud, suffix):
    """Move a download whose checksum failed aside, tagging it with suffix."""

    path = ud.localpath
    if path is None:
        return

    quarantined = "%s_bad-checksum_%s" % (path, suffix)
    bb.warn("Renaming %s to %s" % (path, quarantined))
    bb.utils.movefile(path, quarantined)
786
787
def try_mirror_url(origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            # Existence check only — no download.
            found = ud.method.checkstatus(ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            # Re-run the original fetcher so it unpacks/uses the tarball.
            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method,"build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return ud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            os.symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        # Never mask a BB_NO_NETWORK violation as a mere mirror failure.
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warn(str(e))
            # Keep the corrupt file around (renamed) for inspection.
            rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
854
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    origud is the original FetchData for the uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(origud, uds[index], ld, check)
        # try_mirror_url() returns False to mean "try the next mirror";
        # any other value — including None — terminates the search.
        # ('is not False' rather than '!= False' makes the sentinel test
        # explicit and identity-based.)
        if ret is not False:
            return ret
    return None
873
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified

    The revision is looked up in the most specific variable first:
    SRCREV_<name>_pn-<pn>, SRCREV_<name>, SRCREV_pn-<pn>, then SRCREV.
    A ;rev= or ;tag= URL parameter overrides (and must agree with) SRCREV.
    """

    srcrev = None
    pn = d.getVar("PN", True)
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a, True)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            # Typo fix in the message: "spcify" -> "specify".
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        # BUGFIX: the original interpolated (attempts, url) into a format
        # string whose placeholders were (url, key names) — the error
        # message printed the key list where the URL should be and vice
        # versa.  The arguments are now in the right order.
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (ud.url, str(attempts)), ud.url)
    if srcrev == "AUTOINC":
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
918
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        # Only file:// entries (handled by the local fetcher) contribute.
        if ud and isinstance(ud.method, local.Local):
            ud.setup_localpath(d)
            f = ud.localpath
            pth = ud.decodedurl
            if '*' in pth:
                # Keep the glob pattern attached to the resolved directory
                # so the checksum code can expand it later.
                f = os.path.join(os.path.abspath(f), pth)
            if f.startswith(dl_dir):
                # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                if os.path.exists(f):
                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                else:
                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
            filelist.append(f)

    return " ".join(filelist)
947
948
def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns (path, checksum) pairs for the space-separated paths in
    filelist, caching results as it proceeds.  Glob patterns are expanded
    and directories are walked recursively.  The result is sorted by
    checksum value.
    """

    def _file_sum(path):
        # Delegate to the shared checksum cache; unreadable files are
        # reported and skipped.
        try:
            return _checksum_cache.get_checksum(path)
        except OSError as e:
            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(path), e))
            return None

    def _dir_sums(top):
        # Checksum every regular file under 'top'.
        found = []
        for root, dirs, files in os.walk(top):
            for fname in files:
                full = os.path.join(root, fname)
                csum = _file_sum(full)
                if csum:
                    found.append((full, csum))
        return found

    checksums = []
    for pth in filelist.split():
        if '*' in pth:
            # Handle globs; directories matched by the pattern are walked.
            for match in glob.glob(pth):
                if os.path.isdir(match):
                    checksums.extend(_dir_sums(match))
                else:
                    csum = _file_sum(match)
                    if csum:
                        checksums.append((match, csum))
        elif os.path.isdir(pth):
            checksums.extend(_dir_sums(pth))
        else:
            csum = _file_sum(pth)
            if csum:
                checksums.append((pth, csum))

    checksums.sort(key=operator.itemgetter(1))
    return checksums
1000
1001
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        # Split the (variable-expanded) URL into components; parm holds the
        # ;key=value parameters from the SRC_URI entry.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        # Credentials may come either from the URL itself or from parameters.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # Checksum flag names are namespaced by the ;name= parameter so that
        # several entries in one SRC_URI can carry separate checksums.
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksums resolve in priority order: explicit URL
        # parameter, else None for non-network URL types, else the
        # corresponding SRC_URI varflag.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

        self.names = self.parm.get("name",'default').split(',')

        # Pick the first registered fetch method that claims this URL.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # "proto" is a deprecated alias for the "protocol" parameter.
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        # Give the method a chance to set up method-specific state
        # (may set self.localfile, revisions, etc.).
        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR", True)
        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        else:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisons(self, d):
        """Resolve the SRCREV for each name on this URL via srcrev_internal_helper."""
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        """Compute self.localpath through the fetch method if not already set."""
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)

        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1110
class FetchMethod(object):
    """Base class for 'fetch'ing data.

    Subclasses implement supports()/download() (and optionally
    urldata_init, unpack, clean, the *_revision helpers) for one URL
    scheme. Fixes applied here: download(), checkstatus() and
    latest_revision() previously referenced an undefined name `url`,
    raising NameError instead of the intended error; they now use the
    url carried by the urldata argument. __init__ also no longer uses a
    mutable default argument.
    """

    def __init__(self, urls = None):
        # The urls argument is accepted for backwards compatibility but was
        # never stored; the urls property always starts out empty.
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        # Nor for glob patterns that expand to multiple files.
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        # Backing setter for the urls property.
        self.__urls = urls

    def getUrls(self):
        # Backing getter for the urls property.
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        # Fix: was NoMethodError(url) with `url` undefined (NameError).
        raise NoMethodError(urldata.url)

    def unpack(self, urldata, rootdir, data):
        """Unpack urldata.localpath into rootdir.

        Honours the unpack/dos/subdir/extract URL parameters; files that
        are not archives (or have unpack=0) are copied into place instead.
        Note: the `data` parameter (the datastore) shadows the module-level
        `data` import within this method.
        """
        iterate = False
        file = urldata.localpath

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        # efile is the decompressed output name for single-file compressors.
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    # Extract a single file, then recurse to unpack it.
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
                    basepath = getattr(urldata, "basepath", None)
                    destdir = "."
                    if basepath and basepath.endswith("/"):
                        basepath = basepath.rstrip("/")
                    elif basepath:
                        basepath = os.path.dirname(basepath)
                    if basepath and basepath.find("/") != -1:
                        destdir = basepath[:basepath.rfind('/')]
                        destdir = destdir.strip('/')
                    if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
                        os.makedirs("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
                else:
                    # The "destdir" handling was specifically done for FILESPATH
                    # items. So, only do so for file:// entries.
                    if urldata.type == "file" and urldata.path.find("/") != -1:
                        destdir = urldata.path.rsplit("/", 1)[0]
                    else:
                        destdir = "."
                    bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                    cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)

        if not cmd:
            return

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        os.chdir(rootdir)
        if 'subdir' in urldata.parm:
            newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
            bb.utils.mkdirhier(newdir)
            os.chdir(newdir)

        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            # Second pass: unpack the file we just extracted from the rpm.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        # Fix: was logging the undefined name `url` (NameError).
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            # Fix: was ParameterError(..., url) with `url` undefined.
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        """Return (resolved, revision-string) for sorting purposes."""
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        """Build the persistent-cache key for this url/name, scoped by PN."""
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
1341
class Fetch(object):
    """Top-level fetcher frontend.

    Maps a list of SRC_URI entries onto FetchData state and drives
    download/checkstatus/unpack/clean across all of them.

    Fixes applied: unpack() previously tested self.d.expand(self.localpath)
    — self.localpath is a bound method, never None, so the skip guard
    could not fire; it now tests the url's own localpath. clean()
    previously constructed FetchData(url, d) with `d` undefined
    (NameError); it now uses self.d. unpack()'s docstring was a
    copy-paste of checkstatus()'s. Mutable default arguments replaced
    with None.
    """

    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if not urls:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        # Reuse cached per-recipe FetchData objects when allowed.
        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    # In localonly mode, remember the url with no data
                    # rather than failing on remote entries.
                    if localonly:
                        self.ud[url] = None

        if fn and cache:
            urldata_cache[fn] = self.ud

    def localpath(self, url):
        """Return the expanded local path for a single url."""
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls = None):
        """
        Fetch all urls
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            lf = bb.utils.lockfile(ud.lockfile)

            try:
                # Restore the original network setting each iteration; it may
                # have been forced to "1" below for a premirror-only fetch.
                self.d.setVar("BB_NO_NETWORK", network)

                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # early checksum verify, so that if checksum mismatched,
                        # fetcher still have chance to fetch from mirror
                        update_stamp(ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(ud, self.d)

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                bb.utils.unlockfile(lf)

    def checkstatus(self, urls = None):
        """
        Check all urls exist upstream
        """

        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(ud, self.d)
                except Exception:
                    # Best-effort fallback (narrowed from a bare except so
                    # SystemExit/KeyboardInterrupt still propagate):
                    # finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls = None):
        """
        Unpack all urls into the directory root
        """

        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            # Fix: originally tested self.localpath (a bound method, never
            # None); the intent is to skip entries without a local path.
            if self.d.expand(ud.localpath) is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls = None):
        """
        Clean files that the fetcher gets or places
        """

        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                # Fix: originally FetchData(url, d) — `d` is undefined here.
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)
1546
1547from . import cvs
1548from . import git
1549from . import gitsm
1550from . import gitannex
1551from . import local
1552from . import svn
1553from . import wget
1554from . import ssh
1555from . import sftp
1556from . import perforce
1557from . import bzr
1558from . import hg
1559from . import osc
1560from . import repo
1561
# Register the available fetch methods. Order matters: FetchData picks the
# first method whose supports() accepts a given URL.
for _cls in (local.Local, wget.Wget, svn.Svn, git.Git, gitsm.GitSM,
             gitannex.GitANNEX, cvs.Cvs, ssh.SSH, sftp.SFTP,
             perforce.Perforce, bzr.Bzr, hg.Hg, osc.Osc, repo.Repo):
    methods.append(_cls())
del _cls
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..03e9ac461b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import runfetchcmd
34from bb.fetch2 import logger
35
class Bzr(FetchMethod):
    """Fetch method for bzr (Bazaar) repositories.

    Fix applied: urldata_init previously called self.latest_revision(ud, d)
    but the base class signature is latest_revision(ud, d, name), so any
    bzr url without an explicit revision raised a TypeError; the first
    name from the url data is now passed.
    """

    def supports(self, ud, d):
        """Check to see if a given url can be fetched with bzr."""
        return ud.type in ['bzr']

    def urldata_init(self, ud, d):
        """
        init bzr specific variable within url data
        """
        # Create paths to bzr checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        ud.setup_revisons(d)

        if not ud.revision:
            # Fix: latest_revision() requires a name argument (see
            # FetchMethod.latest_revision); bzr urls carry a single name.
            ud.revision = self.latest_revision(ud, d, ud.names[0])

        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildbzrcommand(self, ud, d, command):
        """
        Build up an bzr commandline based on ud
        command is "fetch", "update", "revno"
        """

        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = ud.parm.get('protocol', 'http')

        bzrroot = ud.host + ud.path

        options = []

        if command == "revno":
            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        else:
            if ud.revision:
                options.append("-r %s" % ud.revision)

            if command == "fetch":
                bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            elif command == "update":
                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid bzr command %s" % command, ud.url)

        return bzrcmd

    def download(self, ud, d):
        """Fetch url"""

        # NOTE(review): the existence check uses basename(ud.pkgdir) while
        # the chdir below uses basename(ud.path); these normally agree but
        # the asymmetry looks unintentional — confirm before changing.
        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            # Existing checkout: pull updates in place.
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", ud.url)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            # Fresh checkout: clear any stale tree first, then branch.
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", ud.url)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        """bzr supports automatic source revisions (SRCREV)."""
        return True

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)

        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()

    def sortable_revision(self, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(ud, d)

    def _build_revision(self, ud, d):
        """The revision used to build is simply the resolved bzr revno."""
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..d27d96f68c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
33from bb.fetch2 import runfetchcmd
34
class Cvs(FetchMethod):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def urldata_init(self, ud, d):
        """Initialise CVS-specific fields (module, tag, date, localfile) on ud."""
        if not "module" in ud.parm:
            raise MissingParameterError("module", ud.url)
        ud.module = ud.parm["module"]

        ud.tag = ud.parm.get('tag', "")

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            # A tag pins the checkout, so drop the date restriction.
            ud.date = ""

        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        # The tarball name encodes every option that affects its contents.
        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

    def need_update(self, ud, d):
        """A fetch is needed if date is "now" or no tarball exists yet."""
        if (ud.date == "now"):
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def download(self, ud, d):
        """Check out (or update) the module from CVS and tar up the result."""

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        # Build the CVSROOT string: the "dir" method uses a plain path,
        # anything else uses the :method;options:user[:pswd]@host:port/path form.
        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            # NOTE(review): assumes ud.user is set for non-"dir" methods; a
            # url without a user would make this concatenation fail — confirm.
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            # An existing CVS checkout: update it in place.
            logger.info("Update " + ud.url)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup = [moddir])

        if not os.access(moddir, os.R_OK):
            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            # Archive from pkgdir so the tarball keeps the full localdir path.
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            # Archive just the module directory itself.
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean CVS Files and tarballs """

        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)

        bb.utils.remove(pkgdir, True)
        bb.utils.remove(ud.localpath)
171
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..9ca24428a1
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,355 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports multiple branch fetching, with branches
15 separated by commas. In multiple branches case, the name option
16 must have the same number of names to match the branches, which is
17 used to specify the SRC_REV for the branch
18 e.g:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 and current revision may disappear from upstream repo. This option will
33 remind fetcher to preserve local cache carefully for future use.
34 The default value is "0", set rebaseable=1 for rebaseable git repo.
35
36- nocheckout
37 Don't checkout source code when unpacking. set this option for the recipe
38 who has its own routine to checkout code.
39 The default is "0", set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't checkout the source code
43 when unpacking. Set this option for the recipe who has its own routine to
44 checkout code and tracking branch requirements.
45 The default is "0", set bareclone=1 if needed.
46
47- nobranch
48 Don't check the SHA validation for branch. set this option for the recipe
49 referring to commit which is valid in tag instead of branch.
50 The default is "0", set nobranch=1 if needed.
51
52"""
53
54#Copyright (C) 2005 Richard Purdie
55#
56# This program is free software; you can redistribute it and/or modify
57# it under the terms of the GNU General Public License version 2 as
58# published by the Free Software Foundation.
59#
60# This program is distributed in the hope that it will be useful,
61# but WITHOUT ANY WARRANTY; without even the implied warranty of
62# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63# GNU General Public License for more details.
64#
65# You should have received a copy of the GNU General Public License along
66# with this program; if not, write to the Free Software Foundation, Inc.,
67# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
68
69import os
70import bb
71from bb import data
72from bb.fetch2 import FetchMethod
73from bb.fetch2 import runfetchcmd
74from bb.fetch2 import logger
75
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""

    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        # Git sources are pinned by SRCREV, so file checksums do not apply.
        return False

    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"

        ud.nobranch = ud.parm.get("nobranch", "0") == "1"

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone", "0") == "1"
        if ud.bareclone:
            # Use a real boolean for consistency with the other flags (True == 1).
            ud.nocheckout = True

        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
        ud.branches = {}
        for name in ud.names:
            branch = branches[ud.names.index(name)]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

        ud.setup_revisons(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a SHA-1 revision (40
            # lowercase hex characters) is treated as a symbolic ref and
            # resolved into one.  (The old comment said "sha256"; git object
            # names here are SHA-1.)
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]
        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
        ud.clonedir = os.path.join(gitdir, gitsrcname)

        ud.localfile = ud.clonedir

    def localpath(self, ud, d):
        # The "local file" for git is the bare clone directory itself.
        return ud.clonedir

    def need_update(self, ud, d):
        # A fetch is needed if the clone is missing, any wanted revision is
        # absent from it, or a required mirror tarball has not been written.
        if not os.path.exists(ud.clonedir):
            return True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                return True
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                # Pass ud.url so the failure is reported against the URL,
                # consistent with the other FetchError raises in this module.
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]), ud.url)

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.clonedir)
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir)
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        # Use ud.basecmd rather than a hard-coded "git", consistent with
        # every other git invocation in this class.
        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
        return True

    def clean(self, ud, d):
        """ clean the git directory """

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name):
        # Return True when the wanted revision is already present locally.
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name])
        else:
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name], ud.branches[name])
        try:
            output = runfetchcmd(cmd, d, quiet=True)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]

    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        return output

    def _latest_revision(self, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def checkstatus(self, ud, d):
        # Probe the remote; any ls-remote failure means the URL is unreachable.
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except bb.fetch2.FetchError:
            # Fix: FetchError is never imported unqualified in this module, so
            # the previous bare "except FetchError" raised a NameError instead
            # of reporting the URL as unavailable.
            return False
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 0000000000..0f37897450
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,76 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git annex implementation
5"""
6
7# Copyright (C) 2014 Otavio Salvador
8# Copyright (C) 2014 O.S. Systems Software LTDA.
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch2.git import Git
27from bb.fetch2 import runfetchcmd
28from bb.fetch2 import logger
29
class GitANNEX(Git):
    """Fetcher for git repositories whose large content is managed by git-annex."""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitannex']

    def uses_annex(self, ud, d):
        # The presence of the "git-annex" branch marks an annex-enabled repo.
        for name in ud.names:
            try:
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
                return True
            except bb.fetch2.FetchError:
                # Fix: this module imports bb.fetch2, not bb.fetch, so the old
                # "bb.fetch.FetchError" reference would fail with AttributeError
                # at the exact moment the fetch error needed handling.
                pass

        return False

    def update_annex(self, ud, d):
        # Pull all annexed content into the clone; returns False on failure.
        try:
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
        except bb.fetch2.FetchError:
            return False
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)

        return True

    def download(self, ud, d):
        Git.download(self, ud, d)

        os.chdir(ud.clonedir)
        annex = self.uses_annex(ud, d)
        if annex:
            self.update_annex(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        os.chdir(ud.destdir)
        # "annex sync" may legitimately fail (e.g. no remotes); best-effort.
        try:
            runfetchcmd("%s annex sync" % (ud.basecmd), d)
        except bb.fetch2.FetchError:
            pass

        annex = self.uses_annex(ud, d)
        if annex:
            runfetchcmd("%s annex get" % (ud.basecmd), d)
            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000000..1a762153c4
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,126 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git submodules implementation
5"""
6
7# Copyright (C) 2013 Richard Purdie
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import os
23import bb
24from bb import data
25from bb.fetch2.git import Git
26from bb.fetch2 import runfetchcmd
27from bb.fetch2 import logger
28
class GitSM(Git):
    """Fetcher for git repositories that contain submodules."""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitsm']

    def uses_submodules(self, ud, d):
        # A revision uses submodules iff it carries a .gitmodules file.
        for name in ud.names:
            try:
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
                return True
            except bb.fetch2.FetchError:
                # Fix: this module imports bb.fetch2, not bb.fetch, so the old
                # "bb.fetch.FetchError" reference raised AttributeError instead
                # of treating a missing .gitmodules as "no submodules".
                pass
        return False

    def _set_relative_paths(self, repopath):
        """
        Fix submodule paths to be relative instead of absolute,
        so that when we move the repo it doesn't break
        (In Git 1.7.10+ this is done automatically)
        """
        submodules = []
        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
            for line in f.readlines():
                if line.startswith('[submodule'):
                    submodules.append(line.split('"')[1])

        for module in submodules:
            repo_conf = os.path.join(repopath, module, '.git')
            if os.path.exists(repo_conf):
                with open(repo_conf, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.startswith('gitdir:'):
                        oldpath = line.split(': ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
                            lines[i] = 'gitdir: %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf, 'w') as f:
                        for line in lines:
                            f.write(line)

            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
            if os.path.exists(repo_conf2):
                with open(repo_conf2, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.lstrip().startswith('worktree = '):
                        oldpath = line.split(' = ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 3) + module
                            lines[i] = '\tworktree = %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf2, 'w') as f:
                        for line in lines:
                            f.write(line)

    def update_submodules(self, ud, d):
        # We have to convert bare -> full repo, do the submodule bit, then convert back
        tmpclonedir = ud.clonedir + ".tmp"
        gitdir = tmpclonedir + os.sep + ".git"
        bb.utils.remove(tmpclonedir, True)
        os.mkdir(tmpclonedir)
        os.rename(ud.clonedir, gitdir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        os.chdir(tmpclonedir)
        runfetchcmd(ud.basecmd + " reset --hard", d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
        self._set_relative_paths(tmpclonedir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
        os.rename(gitdir, ud.clonedir)
        bb.utils.remove(tmpclonedir, True)

    def download(self, ud, d):
        Git.download(self, ud, d)

        os.chdir(ud.clonedir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            self.update_submodules(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        os.chdir(ud.destdir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
            runfetchcmd(ud.basecmd + " submodule init", d)
            runfetchcmd(ud.basecmd + " submodule update", d)
126
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..6927f6111e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,187 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
class Hg(FetchMethod):
    """Class to fetch from mercurial repositories"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with mercurial.
        """
        return ud.type in ['hg']

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        # "tip" is a moving target, so always refetch; otherwise fetch only
        # when the tarball is missing.
        revTag = ud.parm.get('rev', 'tip')
        if revTag == "tip":
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
        if proto == "file":
            host = "/"
            ud.host = "localhost"

        if not ud.user:
            hgroot = host + ud.path
        else:
            if ud.pswd:
                hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
            else:
                hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

        # Don't specify revision for the fetch; clone the entire repo.
        # This avoids an issue if the specified revision is a tag, because
        # the tag actually exists in the specified revision + 1, so it won't
        # be available when used in any successive commands.
        if ud.revision and command != "fetch":
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

        return cmd

    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't checkout the specified revision if its on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # Fix: exclude '.hgtags' (Mercurial's tag metadata file);
            # '.hgrags' was a typo and excluded nothing.
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

    def _latest_revision(self, ud, d, name):
        """
        Compute tip revision for the url
        """
        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
        return output.strip()

    def _build_revision(self, ud, d, name):
        return ud.revision

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..5c4e42a942
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,116 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import urllib
30import bb
31import bb.utils
32from bb import data
33from bb.fetch2 import FetchMethod, FetchError
34from bb.fetch2 import logger
35
class Local(FetchMethod):
    """Fetch method for file:// URLs whose content already lives on this machine."""

    def supports(self, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
        return urldata.type in ['file']

    def urldata_init(self, ud, d):
        # We don't set localfile as for this fetcher the file is already local!
        decoded = urllib.unquote(ud.url.split("://")[1].split(";")[0])
        ud.decodedurl = decoded
        ud.basename = os.path.basename(decoded)
        ud.basepath = decoded
        return

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        target = urldata.decodedurl
        candidate = target
        if target[0] != "/":
            # Relative path: resolve against FILESPATH, then FILESDIR,
            # then fall back to DL_DIR.
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths: \n%s" % (target, "\n ".join(filespath.split(":"))))
                candidate = bb.utils.which(filespath, target)
            if not candidate:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (target, filesdir))
                    candidate = os.path.join(filesdir, target)
            if "*" in target and (not candidate or not os.path.exists(candidate)):
                # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
                candidate = bb.utils.which(filespath, ".")
                logger.debug(2, "Searching for %s in path: %s" % (target, candidate))
                return candidate
            if not os.path.exists(candidate):
                dldirfile = os.path.join(d.getVar("DL_DIR", True), target)
                logger.debug(2, "Defaulting to %s for %s" % (dldirfile, target))
                bb.utils.mkdirhier(os.path.dirname(dldirfile))
                return dldirfile
        return candidate

    def need_update(self, ud, d):
        # Glob URLs are never refetched; anything else only if missing on disk.
        if "*" in ud.url:
            return False
        return not os.path.exists(ud.localpath)

    def download(self, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
            searched = []
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                searched = filespath.split(":")
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                searched.append(filesdir)
            searched.append(d.getVar("DL_DIR", True))

            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(searched)
            raise FetchError(msg)

        return True

    def checkstatus(self, urldata, d):
        """
        Check the status of the url
        """
        if "*" in urldata.localpath:
            logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
            return True
        return os.path.exists(urldata.localpath)

    def clean(self, urldata, d):
        # Nothing to clean: the source files are not owned by the fetcher.
        return
116
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..3d8779682f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import logging
12import bb
13from bb import data
14from bb.fetch2 import FetchMethod
15from bb.fetch2 import FetchError
16from bb.fetch2 import MissingParameterError
17from bb.fetch2 import runfetchcmd
18
class Osc(FetchMethod):
    """Class to fetch a module or modules from Opensuse build server
    repositories."""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with osc.
        """
        return ud.type in ['osc']

    def urldata_init(self, ud, d):
        """Initialise osc specific url data: module name, checkout
        directories, revision and the local tarball name.

        Raises MissingParameterError when no module= parameter is given.
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        # An explicit rev= parameter wins; otherwise ask the core fetcher
        # for a SRCREV (which may legitimately resolve to nothing).
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)

    def _buildosccommand(self, ud, d, command):
        """
        Build up an osc commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_osc}', d)

        options = []

        # Every run works against a freshly generated configuration file.
        config = "-c %s" % self.generate_config(ud, d)

        if ud.revision:
            options.append("-r %s" % ud.revision)

        coroot = self._strip_leading_slashes(ud.path)

        if command == "fetch":
            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
        elif command == "update":
            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
        else:
            raise FetchError("Invalid osc command %s" % command, ud.url)

        return osccmd

    def download(self, ud, d):
        """
        Fetch url
        """
        # 'logger' is not imported at module level in this file; pull it in
        # locally so the debug/info calls below cannot raise a NameError.
        from bb.fetch2 import logger

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update "+ ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", oscupdatecmd)
            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
            runfetchcmd(oscupdatecmd, d)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
            runfetchcmd(oscfetchcmd, d)

        # ud.path has a leading '/', so this is plain string concatenation
        # (the former os.path.join() around it was a single-argument no-op).
        os.chdir(ud.pkgdir + ud.path)
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return False

    def generate_config(self, ud, d):
        """
        Generate a .oscrc to be used for this run.

        Returns the path of the written configuration file.
        """
        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

        # Context manager guarantees the file is closed even on error.
        with open(config_path, 'w') as f:
            f.write("[general]\n")
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)
            f.write("user = %s\n" % ud.parm["user"])
            f.write("pass = %s\n" % ud.parm["pswd"])

        return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..9329d72779
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,194 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import subprocess
31import logging
32import bb
33from bb import data
34from bb.fetch2 import FetchMethod
35from bb.fetch2 import FetchError
36from bb.fetch2 import logger
37from bb.fetch2 import runfetchcmd
38
class Perforce(FetchMethod):
    """Fetch method for Perforce ('p4://') URLs.

    Files for a given changeset are fetched one by one with 'p4 print'
    into a temporary directory and packed into a tarball in DL_DIR.
    """

    def supports(self, ud, d):
        """Check to see if a given url can be fetched with perforce."""
        return ud.type in ['p4']

    @staticmethod
    def doparse(url, d):
        """Split a p4 url into (host, path, user, pswd, parm).

        Credentials and server may be embedded as user:pswd:host:port@path;
        when absent, host and port fall back to P4PORT from the datastore.
        Trailing ';key=value' pairs on the path become entries of parm.
        """
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@")
        if delim != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            keys = []
            values = []
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm

    @staticmethod
    def getcset(d, depot, host, user, pswd, parm):
        """Return the changeset number to fetch for *depot*.

        An explicit 'cset' parameter wins; otherwise the newest change for
        the (optionally revision/label/P4DATE restricted) depot path is
        queried with 'p4 changes -m 1'. Returns -1 when nothing is found.
        """
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = data.getVar("P4DATE", d, True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCMD_p4', d, True)
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        # Output looks like "Change <number> on <date> by ...".
        return cset.split(' ')[1]

    def urldata_init(self, ud, d):
        """Work out the local tarball name for the url."""
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        # If a label is specified, we use that as our filename
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return

        base = path
        which = path.find('/...')
        if which != -1:
            # Strip the trailing '/...' wildcard. The previous code used
            # path[:which-1], which also chopped the final character off
            # the last directory component of the depot path.
            base = path[:which]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

    def download(self, ud, d):
        """
        Fetch urls
        """
        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        # Build the global p4 options (credentials and server address).
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCMD_p4', d, True)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
        mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
        tmpfile, errors = bb.process.run(mktemp)
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

        count = 0

        # Each line is "<depotfile>#<rev> - <action> change ...".
        for entry in p4file:
            fields = entry.split()

            # Skip files deleted at this changeset.
            if fields[2] == "delete":
                continue

            dest = fields[0][len(path)+1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
            count = count + 1

        if count == 0:
            # The original called logger.error() with no arguments, which
            # raised a TypeError before the intended FetchError below.
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..21678eb7d9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch2 import FetchMethod
30from bb.fetch2 import runfetchcmd
31
class Repo(FetchMethod):
    """Class to fetch a module or modules from repo (git) repositories"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """
        return ud.type in ["repo"]

    def urldata_init(self, ud, d):
        """
        We don't care about the git rev of the manifests repository, but
        we do care about the manifest to use. The default is "default".
        We also care about the branch or tag to be used. The default is
        "master".
        """

        ud.proto = ud.parm.get('protocol', 'git')
        ud.branch = ud.parm.get('branch', 'master')
        ud.manifest = ud.parm.get('manifest', 'default.xml')
        # The manifest parameter may be given without its extension.
        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)

    def download(self, ud, d):
        """Fetch url"""
        # 'logger' is not imported at module level in this file; import it
        # locally so the debug call below cannot raise a NameError.
        from bb.fetch2 import logger

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        # Only (re-)initialise when no .repo control directory exists yet.
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # By default strip SCM metadata out of the packed tarball.
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

    def supports_srcrev(self):
        return False

    def _build_revision(self, ud, d):
        # The manifest name stands in for a revision.
        return ud.manifest

    def _want_sortable_revision(self, ud, d):
        return False
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..8ea4ef2ff3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake SFTP Fetch implementation
5
6Class for fetching files via SFTP. It tries to adhere to the (now
7expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
8Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
9(SSH)" (SECSH URI).
10
11It uses SFTP (as to adhere to the SECSH URI specification). It only
12supports key based authentication, not password. This class, unlike
13the SSH fetcher, does not support fetching a directory tree from the
14remote.
15
16 http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
17 https://www.iana.org/assignments/uri-schemes/prov/sftp
18 https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
19
20Please note that '/' is used as host path separator, and not ":"
21as you may be used to from the scp/sftp commands. You can use a
22~ (tilde) to specify a path relative to your home directory.
23(The /~user/ syntax, for specifying a path relative to another
24user's home directory is not supported.) Note that the tilde must
25still follow the host path separator ("/"). See examples below.
26
27Example SRC_URIs:
28
29SRC_URI = "sftp://host.example.com/dir/path.file.txt"
30
31A path relative to your home directory.
32
33SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
34
35You can also specify a username (specifying password in the
36URI is not supported, use SSH keys to authenticate):
37
38SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
39
40"""
41
42# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
43#
44# Based in part on bb.fetch2.wget:
45# Copyright (C) 2003, 2004 Chris Larson
46#
47# This program is free software; you can redistribute it and/or modify
48# it under the terms of the GNU General Public License version 2 as
49# published by the Free Software Foundation.
50#
51# This program is distributed in the hope that it will be useful,
52# but WITHOUT ANY WARRANTY; without even the implied warranty of
53# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
54# GNU General Public License for more details.
55#
56# You should have received a copy of the GNU General Public License along
57# with this program; if not, write to the Free Software Foundation, Inc.,
58# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
59#
60# Based on functions from the base bb module, Copyright 2003 Holger Schurig
61
62import os
63import bb
64import urllib
65import commands
66from bb import data
67from bb.fetch2 import URI
68from bb.fetch2 import FetchMethod
69from bb.fetch2 import runfetchcmd
70
71
class SFTP(FetchMethod):
    """Class to fetch urls via 'sftp'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with sftp.
        """
        return ud.type in ['sftp']

    def recommends_checksum(self, urldata):
        # Remote files should normally be accompanied by checksums.
        return True

    def urldata_init(self, ud, d):
        """Derive the local file name for the url."""
        if ud.parm.get('protocol') == 'git':
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a "
                "git repository using ssh, you need to use the "
                "git:// prefix with protocol=ssh", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

    def download(self, ud, d):
        """Fetch urls"""
        remote_uri = URI(ud.url)
        basecmd = 'sftp -oPasswordAuthentication=no'

        # Pass a non-default port via -P and drop it from the URI.
        portarg = ''
        if remote_uri.port:
            portarg = '-P %d' % remote_uri.port
            remote_uri.port = None

        destfile = os.path.join(data.getVar('DL_DIR', d, True), ud.localfile)

        userarg = ''
        if remote_uri.userinfo:
            userarg = remote_uri.userinfo + '@'

        remote_path = remote_uri.path
        # Support URIs relative to the user's home directory, with the
        # tilde syntax. (E.g. <sftp://example.com/~/foo.diff>.)
        if remote_path[:3] == '/~/':
            remote_path = remote_path[3:]

        remote = '%s%s:%s' % (userarg, remote_uri.hostname, remote_path)

        cmd = '%s %s %s %s' % (basecmd, portarg,
                               commands.mkarg(remote),
                               commands.mkarg(destfile))

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..4ae979472c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13 Please note that '/' is used as host, path separator not ':' as you may
14 be used to, also '~' can be used to specify user HOME, but again after '/'
15
16 Example SRC_URI:
17 SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
18 SRC_URI = "ssh://user@host.example.com/~/file.txt"
19'''
20
21# Copyright (C) 2006 OpenedHand Ltd.
22#
23#
24# Based in part on svk.py:
25# Copyright (C) 2006 Holger Hans Peter Freyther
26# Based on svn.py:
27# Copyright (C) 2003, 2004 Chris Larson
28# Based on functions from the base bb module:
29# Copyright 2003 Holger Schurig
30#
31#
32# This program is free software; you can redistribute it and/or modify
33# it under the terms of the GNU General Public License version 2 as
34# published by the Free Software Foundation.
35#
36# This program is distributed in the hope that it will be useful,
37# but WITHOUT ANY WARRANTY; without even the implied warranty of
38# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
39# GNU General Public License for more details.
40#
41# You should have received a copy of the GNU General Public License along
42# with this program; if not, write to the Free Software Foundation, Inc.,
43# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44
45import re, os
46from bb import data
47from bb.fetch2 import FetchMethod
48from bb.fetch2 import FetchError
49from bb.fetch2 import logger
50from bb.fetch2 import runfetchcmd
51
52
# Parses urls of the form ssh://[user[:pass]][;params]@host[:port]/path[;params].
# Named groups: user, pass, cparam, host, port, path, sparam.
__pattern__ = re.compile(r'''
    \s*                 # Skip leading whitespace
    ssh://              # scheme
    (                   # Optional username/password block
      (?P<user>\S+)     # username
      (:(?P<pass>\S+))? # colon followed by the password (optional)
    )?
    (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
    @
    (?P<host>\S+?)          # non-greedy match of the host
    (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
    /
    (?P<path>[^;]+)         # path on the remote system, may be absolute or relative,
                            # and may include the use of '~' to reference the remote home
                            # directory
    (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
    $
''', re.VERBOSE)
71
class SSH(FetchMethod):
    '''Class to fetch a module or modules via Secure Shell'''

    def supports(self, urldata, d):
        """Return True for urls matching the ssh:// pattern above."""
        return __pattern__.match(urldata.url) != None

    def supports_checksum(self, urldata):
        # scp can copy whole directory trees; there is no single artifact
        # to checksum.
        return False

    def urldata_init(self, urldata, d):
        """Validate the url and derive the local download path."""
        if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
            # The 'bb' name is never bound at module level in this file
            # (only 'from bb import ...' is used), so import it here to
            # avoid a NameError when raising.
            import bb.fetch2
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a git " +
                "repository using ssh, you need to use " +
                "git:// prefix with protocol=ssh", urldata.url)
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, urldata, d):
        """Copy the remote file (or tree) into DL_DIR with scp."""
        # 'bb' is never bound at module level in this file; import it here
        # so check_network_access below cannot raise a NameError.
        import bb.fetch2
        import commands

        dldir = d.getVar('DL_DIR', True)

        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        if port:
            portarg = '-P %s' % port
        else:
            portarg = ''

        # Assemble the user[:password]@host:path source spec.
        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path

        cmd = 'scp -B -r %s %s %s/' % (
            portarg,
            commands.mkarg(fr),
            commands.mkarg(dldir)
        )

        bb.fetch2.check_network_access(d, cmd, urldata.url)

        runfetchcmd(cmd, d)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..8847461913
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,191 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30import re
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
class Svn(FetchMethod):
    """Class to fetch a module or modules from svn repositories"""
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with svn.
        """
        return ud.type in ['svn']

    def urldata_init(self, ud, d):
        """
        init svn specific variable within url data

        Raises MissingParameterError when no module= parameter is given.
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.basecmd = d.getVar('FETCHCMD_svn', True)

        ud.module = ud.parm["module"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        # Resolve SRCREV handling via the core fetcher. (The spelling
        # 'setup_revisons' is the fetch2 core API name; do not "fix" it.)
        ud.setup_revisons(d)

        # An explicit rev= parameter overrides whatever the core chose.
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']

        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildsvncommand(self, ud, d, command):
        """
        Build up an svn commandline based on ud
        command is "fetch", "update", "info"
        """

        proto = ud.parm.get('protocol', 'svn')

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in ud.parm:
            svn_rsh = ud.parm["rsh"]

        svnroot = ud.host + ud.path

        options = []

        options.append("--no-auth-cache")

        if ud.user:
            options.append("--username %s" % ud.user)

        if ud.pswd:
            options.append("--password %s" % ud.pswd)

        if command == "info":
            svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
        elif command == "log1":
            svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
        else:
            # fetch/update share revision handling: pin with -r and add a
            # peg-revision suffix when a revision is known.
            suffix = ""
            if ud.revision:
                options.append("-r %s" % ud.revision)
                suffix = "@%s" % (ud.revision)

            if command == "fetch":
                svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
            elif command == "update":
                svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
            else:
                raise FetchError("Invalid svn command %s" % command, ud.url)

        if svn_rsh:
            # NOTE(review): subversion itself consults SVN_SSH for svn+ssh
            # tunnels; confirm "svn_RSH" is the variable actually honoured.
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

        return svncmd

    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            # We need to attempt to run svn upgrade first in case its an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # By default strip svn metadata out of the packed tarball.
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean SVN specific files and dirs """

        # Remove both the packed tarball and the working checkout.
        bb.utils.remove(ud.localpath)
        bb.utils.remove(ud.moddir, True)


    def supports_srcrev(self):
        # svn revisions are numeric and ordered, so SRCREV is meaningful.
        return True

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "svn:" + ud.moddir

    def _latest_revision(self, ud, d, name):
        """
        Return the latest upstream revision number
        """
        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))

        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)

        # skip the first line, as per output of svn log
        # then we expect the revision on the 2nd line
        revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)

        return revision

    def sortable_revision(self, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(ud, d)

    def _build_revision(self, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..0456490368
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,106 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch2 import FetchMethod
34from bb.fetch2 import FetchError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def recommends_checksum(self, urldata):
        # Remote files should normally carry md5/sha256 checksums.
        return True

    def urldata_init(self, ud, d):
        """Work out the local file name and the base wget command line."""
        if ud.parm.get('protocol') == 'git':
            raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    def _runwget(self, ud, d, command, quiet):
        """Run a prepared wget command line after the network-access check."""
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command, d, quiet)

    def download(self, ud, d):
        """Fetch urls"""

        fetchcmd = self.basecmd

        if 'downloadfilename' in ud.parm:
            # Direct wget's output to the requested file name inside DL_DIR.
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            fetchcmd += " -O " + dldir + os.sep + ud.localfile

        uri = ud.url.split(";")[0]
        # Resume (-c) when a partial download is already on disk.
        resume = " -c" if os.path.exists(ud.localpath) else ""
        fetchcmd += d.expand("%s -P ${DL_DIR} '%s'" % (resume, uri))

        self._runwget(ud, d, fetchcmd, False)

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        return True

    def checkstatus(self, ud, d):
        """Probe the url with 'wget --spider' without downloading it."""
        uri = ud.url.split(";")[0]
        self._runwget(ud, d, self.basecmd + " --spider '%s'" % uri, True)
        return True