Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py  1538
 1 file changed, 1538 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..451d104f67
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1538 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from __future__ import absolute_import
from __future__ import print_function
import os, re
import signal
import glob
import logging
import urllib
import urlparse
if 'git' not in urlparse.uses_netloc:
    urlparse.uses_netloc.append('git')
from urlparse import urlparse
import operator
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process
import subprocess

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url):
        msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        if url:
            msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            msg = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        self.checksum = checksum
        FetchError.__init__(self, message, url)

class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        msg = "Unpack failure for URL: '%s'. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, msg)
        self.args = (missing, url)

class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        msg = "URL: '%s' has invalid parameters. %s" % (url, message)
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)

class NonLocalMethod(Exception):
    def __init__(self):
        Exception.__init__(self)


class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    _relative_schemes = ['file', 'git']
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.relative = False

        if not uri:
            return

        urlp = urlparse(uri)
        self.scheme = urlp.scheme

        # Convert URI to be relative
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            urlp = urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        # Do support params even for URI schemes that Python's
        # urlparse doesn't support params for.
        path = ''
        param_str = ''
        if not urlp.params:
            path, param_str = (list(urlp.path.split(";", 1)) + [None])[:2]
        else:
            path = urlp.path
            param_str = urlp.params

        self.path = urllib.unquote(path)

        if param_str:
            self.params = self._param_dict(param_str)

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._param_str)

    @property
    def _param_str(self):
        ret = ''
        for key, val in self.params.items():
            ret += ";%s=%s" % (key, val)
        return ret

    def _param_dict(self, param_str):
        parm = {}

        for keyval in param_str.split(";"):
            key, val = keyval.split("=", 1)
            parm[key] = val

        return parm

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        if re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        self.userinfo = username
        if self.password:
            self.userinfo += ":%s" % self.password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            s1, s2 = s.split('=')
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p

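# Illustrative example of decodeurl() on a hypothetical URL (not executed):
#
#   decodeurl("git://git.example.com/repo.git;protocol=https;branch=master")
#   -> ('git', 'git.example.com', '/repo.git', '', '',
#       {'protocol': 'https', 'branch': 'master'})
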
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
    url = '%s://' % type
    if user and type != "file":
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host and type != "file":
        url += "%s" % host
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    url += "%s" % urllib.quote(path)
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url

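# Illustrative example: encodeurl() is the inverse of decodeurl() for the
# same hypothetical tokens (not executed):
#
#   encodeurl(('git', 'git.example.com', '/repo.git', '', '',
#              {'protocol': 'https'}))
#   -> "git://git.example.com/repo.git;protocol=https"
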
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result

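# Illustrative example: uri_replace() implements a single (PRE)MIRRORS
# mapping. Given a mirror line such as (host hypothetical):
#
#   git://.*/.* http://downloads.example.com/mirror/
#
# a matching git URL is rewritten to point at downloads.example.com, with
# the basename of the mirror tarball (or of the local file, when the
# fetcher supports checksums) appended to the replacement path.
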
methods = []
urldata_cache = {}
saved_headrevs = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch2.saved_headrevs = revs.items()
        except:
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save(d):
    _checksum_cache.save_extras(d)

def fetcher_parse_done(d):
    _checksum_cache.save_merge(d)

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """

    data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
    data2 = dict(bb.fetch2.saved_headrevs)

    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False

def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]

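# Illustrative example (not executed): mirror_from_string() turns the textual
# MIRRORS/PREMIRRORS form into (find, replace) pairs; host hypothetical:
#
#   mirror_from_string("git://.*/.* http://mirror.example.com/ \\n "
#                      "ftp://.*/.* http://mirror.example.com/")
#   -> [['git://.*/.*', 'http://mirror.example.com/'],
#       ['ftp://.*/.*', 'http://mirror.example.com/']]
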
def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    """

    if not ud.method.supports_checksum(ud):
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if strict and ud.md5_expected is None and ud.sha256_expected is None:
            raise NoChecksumError('No checksum specified for %s, please add at least one to the recipe:\n'
                             'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                             (ud.localpath, ud.md5_name, md5data,
                              ud.sha256_name, sha256data), u)

        # Log missing sums so user can more easily add them
        if ud.md5_expected is None:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)

        if ud.sha256_expected is None:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)

    md5mismatch = False
    sha256mismatch = False

    if ud.md5_expected != md5data:
        md5mismatch = True

    if ud.sha256_expected != sha256data:
        sha256mismatch = True

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if md5mismatch and ud.md5_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True

    if sha256mismatch and ud.sha256_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, u, md5data)


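# Illustrative example: the recipe lines that satisfy verify_checksum() look
# like the following (values hypothetical):
#
#   SRC_URI[md5sum] = "0123456789abcdef0123456789abcdef"
#   SRC_URI[sha256sum] = "aabbccdd00112233aabbccdd00112233aabbccdd00112233aabbccdd00112233"
#
# With a name= parameter in SRC_URI, the flags become e.g. SRC_URI[name.md5sum].
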
def update_stamp(u, ud, d):
    """
    donestamp is a stamp file indicating that the whole fetch has completed;
    this function updates the stamp after verifying the checksum
    """
    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        verify_checksum(u, ud, d)
        open(ud.donestamp, 'w').close()

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

def get_autorev(d):
    # Only skip caching the source revision in the AUTOREV case when the
    # SRCREV cache policy does not ask for revisions to be kept
    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
        d.setVar('__BB_DONT_CACHE', '1')
    return "AUTOINC"

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = ud.method.sortable_revision(scm, ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            format = format.replace(name, rev)
    if seenautoinc:
        format = "AUTOINC+" + format

    return format

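# Illustrative example (not executed): with two named SCMs in SRC_URI,
# names and host hypothetical:
#
#   SRC_URI = "git://git.example.com/a.git;name=machine \
#              git://git.example.com/b.git;name=meta"
#   SRCREV_FORMAT = "machine_meta"
#
# get_srcrev() replaces each name in SRCREV_FORMAT with its revision
# (truncated to 10 characters), e.g. "1234567890_abcdef0123".
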
def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)

def runfetchcmd(cmd, d, quiet = False, cleanup = []):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD']

    for var in exportvars:
        val = d.getVar(var, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output

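# Illustrative usage (not executed): fetcher implementations run their
# underlying tools through runfetchcmd so that proxy and ssh agent variables
# are exported and failures surface as FetchError, e.g.:
#
#   output = runfetchcmd("git ls-remote %s" % ud.url, d, quiet=True)
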
def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if d.getVar("BB_NO_NETWORK", True) == "1":
        raise NetworkAccess(url, info)
    else:
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)

def build_mirroruris(origud, mirrors, ld):
    uris = []
    uds = []

    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(uri, ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue
            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            adduri(newuri, newud, uris, uds)

    adduri(None, origud, uris, uds)

    return uris, uds

def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter
    """

    if ud.localpath is None:
        return

    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
    bb.utils.movefile(ud.localpath, new_localpath)


def try_mirror_url(newuri, origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            found = ud.method.checkstatus(newuri, ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
            ud.method.download(newuri, ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(newuri, ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            return None
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            os.symlink(ud.localpath, origud.localpath)
        update_stamp(newuri, origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (newuri, origud.url))
            logger.warn(str(e))
            rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False

def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d is a bb.data instance
    origud is the original FetchData for the url we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(uri, origud, uds[index], ld, check)
        if ret != False:
            return ret
    return None

def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified
    """

    if 'rev' in ud.parm:
        return ud.parm['rev']

    if 'tag' in ud.parm:
        return ud.parm['tag']

    rev = None
    pn = d.getVar("PN", True)
    if name != '':
        rev = d.getVar("SRCREV_%s_pn-%s" % (name, pn), True)
        if not rev:
            rev = d.getVar("SRCREV_%s" % name, True)
    if not rev:
        rev = d.getVar("SRCREV_pn-%s" % pn, True)
    if not rev:
        rev = d.getVar("SRCREV", True)
    if rev == "INVALID":
        var = "SRCREV_pn-%s" % pn
        if name != '':
            var = "SRCREV_%s_pn-%s" % (name, pn)
        raise FetchError("Please set %s to a valid value" % var, ud.url)
    if rev == "AUTOINC":
        rev = ud.method.latest_revision(ud.url, ud, d, name)

    return rev


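# Illustrative example: for PN = "foo" and a SRC_URI entry carrying name=meta,
# srcrev_internal_helper() consults, in order:
#
#   SRCREV_meta_pn-foo, SRCREV_meta, SRCREV_pn-foo, SRCREV
#
# and resolves "AUTOINC" via the method's latest_revision().
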
def get_checksum_file_list(d):
    """ Get a list of files to checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            ud.setup_localpath(d)
            f = ud.localpath
            if f.startswith(dl_dir):
                # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                if os.path.exists(f):
                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                else:
                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                    continue
            filelist.append(f)

    return " ".join(filelist)


def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds

    """

    def checksum_file(f):
        try:
            checksum = _checksum_cache.get_checksum(f)
        except OSError as e:
            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
            return None
        return checksum

    checksums = []
    for pth in filelist.split():
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
                checksum = checksum_file(f)
                if checksum:
                    checksums.append((f, checksum))
        elif os.path.isdir(pth):
            # Handle directories
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        checksums.append((fullpth, checksum))
        else:
            checksum = checksum_file(pth)
            if checksum:
                checksums.append((pth, checksum))

    checksums.sort(key=operator.itemgetter(1))
    return checksums


class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

        self.names = self.parm.get("name",'default').split(',')

        self.method = None
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self.url, self, d)

        dldir = d.getVar("DL_DIR", True)
        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        else:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisons(self, d):
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for the case where no name was specified
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        if not self.localpath:
            self.localpath = self.method.localpath(self.url, self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d: the data store (bb.data instance)
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)

        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)

class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in download() (saving code
        duplication and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath):
            return False
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is this backend one for which checksumming is recommended (should
        warnings be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, url, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError(url)

    def unpack(self, urldata, rootdir, data):
        iterate = False
        file = urldata.localpath

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
                    basepath = getattr(urldata, "basepath", None)
                    destdir = "."
                    if basepath and basepath.endswith("/"):
                        basepath = basepath.rstrip("/")
                    elif basepath:
                        basepath = os.path.dirname(basepath)
                    if basepath and basepath.find("/") != -1:
                        destdir = basepath[:basepath.rfind('/')]
                        destdir = destdir.strip('/')
                    if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
                        os.makedirs("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
                    #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
                else:
                    # The "destdir" handling was specifically done for FILESPATH
                    # items. So, only do so for file:// entries.
                    if urldata.type == "file" and urldata.path.find("/") != -1:
                        destdir = urldata.path.rsplit("/", 1)[0]
                    else:
                        destdir = "."
                    bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                    cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)

        if not cmd:
            return

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        os.chdir(rootdir)
        if 'subdir' in urldata.parm:
            newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
            bb.utils.mkdirhier(newdir)
            os.chdir(newdir)

        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

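    # Illustrative SRC_URI parameters handled by unpack() above (not
    # executed; host and paths hypothetical):
    #
    #   http://example.com/file.tar.gz;unpack=0       - copy, do not unpack
    #   http://example.com/file.zip;dos=1             - pass -a to unzip
    #   http://example.com/file.tar.gz;subdir=newdir  - unpack under newdir
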
    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", url)
        return True

    def latest_revision(self, url, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(url, ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(url, ud, d, name)
            return rev

    def sortable_revision(self, url, ud, d, name):
        latest_rev = self._build_revision(url, ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, url, ud, d, name):
        key = self._revision_key(url, ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")

class Fetch(object):
    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    if localonly:
                        self.ud[url] = None
                        pass

        if fn and cache:
            urldata_cache[fn] = self.ud

    def localpath(self, url):
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls = []):
        """
        Fetch all urls
        """
        if len(urls) == 0:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            lf = bb.utils.lockfile(ud.lockfile)

            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(u, ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(u, ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(u, ud, self.d)
                        localpath = ud.localpath
                        # Verify the checksum early so that on a mismatch the
                        # fetcher still has a chance to fetch from a mirror
                        update_stamp(u, ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(u, ud, self.d)

            except BBFetchException as e:
                if isinstance(e, NoChecksumError):
                    logger.error("%s" % str(e))
                elif isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                bb.utils.unlockfile(lf)

    def checkstatus(self, urls = []):
        """
        Check all urls exist upstream
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(u, ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls = []):
        """
        Unpack all urls into the specified root directory
        """

        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls = []):
        """
        Clean files that the fetcher gets or places
        """

        if len(urls) == 0:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile or ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

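# Illustrative usage of the Fetch class (not executed), mirroring how the
# build code typically drives the fetcher:
#
#   fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
#   fetcher.download()
#   fetcher.unpack(d.getVar('WORKDIR', True))
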
from . import cvs
from . import git
from . import gitsm
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import sftp
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())