Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r-- bitbake/lib/bb/fetch2/__init__.py | 1575
1 file changed, 1575 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..5a03a0e46e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1575 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
43__version__ = "2"
44_checksum_cache = bb.checksum.FileChecksumCache()
45
46logger = logging.getLogger("BitBake.Fetcher")
47
48class BBFetchException(Exception):
49 """Class all fetch exceptions inherit from"""
50 def __init__(self, message):
51 self.msg = message
52 Exception.__init__(self, message)
53
54 def __str__(self):
55 return self.msg
56
57class MalformedUrl(BBFetchException):
58 """Exception raised when encountering an invalid url"""
59 def __init__(self, url):
60 msg = "The URL: '%s' is invalid and cannot be interpreted" % url
61 self.url = url
62 BBFetchException.__init__(self, msg)
63 self.args = (url,)
64
65class FetchError(BBFetchException):
66 """General fetcher exception when something happens incorrectly"""
67 def __init__(self, message, url = None):
68 if url:
69 msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
70 else:
71 msg = "Fetcher failure: %s" % message
72 self.url = url
73 BBFetchException.__init__(self, msg)
74 self.args = (message, url)
75
76class ChecksumError(FetchError):
77 """Exception when mismatched checksum encountered"""
78 def __init__(self, message, url = None, checksum = None):
79 self.checksum = checksum
80 FetchError.__init__(self, message, url)
81
82class NoChecksumError(FetchError):
83 """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
84
85class UnpackError(BBFetchException):
86 """General fetcher exception when something happens incorrectly when unpacking"""
87 def __init__(self, message, url):
88 msg = "Unpack failure for URL: '%s'. %s" % (url, message)
89 self.url = url
90 BBFetchException.__init__(self, msg)
91 self.args = (message, url)
92
93class NoMethodError(BBFetchException):
94 """Exception raised when there is no method to obtain a supplied url or set of urls"""
95 def __init__(self, url):
96 msg = "Could not find a fetcher which supports the URL: '%s'" % url
97 self.url = url
98 BBFetchException.__init__(self, msg)
99 self.args = (url,)
100
101class MissingParameterError(BBFetchException):
102 """Exception raised when a fetch method is missing a critical parameter in the url"""
103 def __init__(self, missing, url):
104 msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
105 self.url = url
106 self.missing = missing
107 BBFetchException.__init__(self, msg)
108 self.args = (missing, url)
109
110class ParameterError(BBFetchException):
111 """Exception raised when a url cannot be proccessed due to invalid parameters."""
112 def __init__(self, message, url):
113 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
114 self.url = url
115 BBFetchException.__init__(self, msg)
116 self.args = (message, url)
117
118class NetworkAccess(BBFetchException):
119 """Exception raised when network access is disabled but it is required."""
120 def __init__(self, url, cmd):
121 msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
122 self.url = url
123 self.cmd = cmd
124 BBFetchException.__init__(self, msg)
125 self.args = (url, cmd)
126
127class NonLocalMethod(Exception):
128 def __init__(self):
129 Exception.__init__(self)
130
131
132class URI(object):
133 """
134 A class representing a generic URI, with methods for
135 accessing the URI components, and stringifies to the
136 URI.
137
138 It is constructed by calling it with a URI, or setting
139 the attributes manually:
140
141 uri = URI("http://example.com/")
142
143 uri = URI()
144 uri.scheme = 'http'
145 uri.hostname = 'example.com'
146 uri.path = '/'
147
148 It has the following attributes:
149
150 * scheme (read/write)
151 * userinfo (authentication information) (read/write)
152 * username (read/write)
153 * password (read/write)
154
155 Note, password is deprecated as of RFC 3986.
156
157 * hostname (read/write)
158 * port (read/write)
159 * hostport (read only)
160 "hostname:port", if both are set, otherwise just "hostname"
161 * path (read/write)
162 * path_quoted (read/write)
163 A URI quoted version of path
164 * params (dict) (read/write)
165 * query (dict) (read/write)
166 * relative (bool) (read only)
167 True if this is a "relative URI", (e.g. file:foo.diff)
168
169 It stringifies to the URI itself.
170
171 Some notes about relative URIs: while it's specified that
172 a URI beginning with <scheme>:// should either be directly
173 followed by a hostname or a /, the old URI handling of the
174 fetch2 library did not conform to this. Therefore, this URI
175 class has some kludges to make sure that URIs are parsed in
176 a way conforming to bitbake's current usage. This URI class
177 supports the following:
178
179 file:relative/path.diff (IETF compliant)
180 git:relative/path.git (IETF compliant)
181 git:///absolute/path.git (IETF compliant)
182 file:///absolute/path.diff (IETF compliant)
183
184 file://relative/path.diff (not IETF compliant)
185
186 But it does not support the following:
187
188 file://hostname/absolute/path.diff (would be IETF compliant)
189
190 Note that the last case only applies to a list of
191 "whitelisted" schemes (currently only file://) that require
192 their URIs to not have a network location.
193 """
194
195 _relative_schemes = ['file', 'git']
196 _netloc_forbidden = ['file']
197
198 def __init__(self, uri=None):
199 self.scheme = ''
200 self.userinfo = ''
201 self.hostname = ''
202 self.port = None
203 self._path = ''
204 self.params = {}
205 self.query = {}
206 self.relative = False
207
208 if not uri:
209 return
210
211 # We hijack the URL parameters, since the way bitbake uses
212 # them is not quite RFC compliant.
213 uri, param_str = (uri.split(";", 1) + [None])[:2]
214
215 urlp = urlparse.urlparse(uri)
216 self.scheme = urlp.scheme
217
218 reparse = 0
219
220 # Coerce urlparse to make URI scheme use netloc
221 if not self.scheme in urlparse.uses_netloc:
222 urlparse.uses_netloc.append(self.scheme)
223 reparse = 1
224
225 # Make urlparse happy(/ier) by converting local resources
226 # to RFC compliant URL format. E.g.:
227 # file://foo.diff -> file:foo.diff
228 if urlp.scheme in self._netloc_forbidden:
229 uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
230 reparse = 1
231
232 if reparse:
233 urlp = urlparse.urlparse(uri)
234
235 # Identify if the URI is relative or not
236 if urlp.scheme in self._relative_schemes and \
237 re.compile("^\w+:(?!//)").match(uri):
238 self.relative = True
239
240 if not self.relative:
241 self.hostname = urlp.hostname or ''
242 self.port = urlp.port
243
244 self.userinfo += urlp.username or ''
245
246 if urlp.password:
247 self.userinfo += ':%s' % urlp.password
248
249 self.path = urllib.unquote(urlp.path)
250
251 if param_str:
252 self.params = self._param_str_split(param_str, ";")
253 if urlp.query:
254 self.query = self._param_str_split(urlp.query, "&")
255
256 def __str__(self):
257 userinfo = self.userinfo
258 if userinfo:
259 userinfo += '@'
260
261 return "%s:%s%s%s%s%s%s" % (
262 self.scheme,
263 '' if self.relative else '//',
264 userinfo,
265 self.hostport,
266 self.path_quoted,
267 self._query_str(),
268 self._param_str())
269
270 def _param_str(self):
271 return (
272 ''.join([';', self._param_str_join(self.params, ";")])
273 if self.params else '')
274
275 def _query_str(self):
276 return (
277 ''.join(['?', self._param_str_join(self.query, "&")])
278 if self.query else '')
279
280 def _param_str_split(self, string, elmdelim, kvdelim="="):
281 ret = {}
282 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
283 ret[k] = v
284 return ret
285
286 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
287 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
288
289 @property
290 def hostport(self):
291 if not self.port:
292 return self.hostname
293 return "%s:%d" % (self.hostname, self.port)
294
295 @property
296 def path_quoted(self):
297 return urllib.quote(self.path)
298
299 @path_quoted.setter
300 def path_quoted(self, path):
301 self.path = urllib.unquote(path)
302
303 @property
304 def path(self):
305 return self._path
306
307 @path.setter
308 def path(self, path):
309 self._path = path
310
311 if re.compile("^/").match(path):
312 self.relative = False
313 else:
314 self.relative = True
315
316 @property
317 def username(self):
318 if self.userinfo:
319 return (self.userinfo.split(":", 1))[0]
320 return ''
321
322 @username.setter
323 def username(self, username):
324 password = self.password
325 self.userinfo = username
326 if password:
327 self.userinfo += ":%s" % password
328
329 @property
330 def password(self):
331 if self.userinfo and ":" in self.userinfo:
332 return (self.userinfo.split(":", 1))[1]
333 return ''
334
335 @password.setter
336 def password(self, password):
337 self.userinfo = "%s:%s" % (self.username, password)
338
339def decodeurl(url):
340 """Decodes an URL into the tokens (scheme, network location, path,
341 user, password, parameters).
342 """
343
344 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
345 if not m:
346 raise MalformedUrl(url)
347
348 type = m.group('type')
349 location = m.group('location')
350 if not location:
351 raise MalformedUrl(url)
352 user = m.group('user')
353 parm = m.group('parm')
354
355 locidx = location.find('/')
356 if locidx != -1 and type.lower() != 'file':
357 host = location[:locidx]
358 path = location[locidx:]
359 else:
360 host = ""
361 path = location
362 if user:
363 m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
364 if m:
365 user = m.group('user')
366 pswd = m.group('pswd')
367 else:
368 user = ''
369 pswd = ''
370
371 p = {}
372 if parm:
373 for s in parm.split(';'):
374 s1, s2 = s.split('=')
375 p[s1] = s2
376
377 return type, host, urllib.unquote(path), user, pswd, p
378
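# A worked example of decodeurl() on a hypothetical URL:
#
#   decodeurl("git://git.example.com/repo.git;protocol=https;branch=main")
#   -> ('git', 'git.example.com', '/repo.git', '', '',
#       {'protocol': 'https', 'branch': 'main'})
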
379def encodeurl(decoded):
380 """Encodes a URL from tokens (scheme, network location, path,
381 user, password, parameters).
382 """
383
384 type, host, path, user, pswd, p = decoded
385
386 if not path:
387 raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
388 if not type:
389 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
390 url = '%s://' % type
391 if user and type != "file":
392 url += "%s" % user
393 if pswd:
394 url += ":%s" % pswd
395 url += "@"
396 if host and type != "file":
397 url += "%s" % host
398 # Standardise path to ensure comparisons work
399 while '//' in path:
400 path = path.replace("//", "/")
401 url += "%s" % urllib.quote(path)
402 if p:
403 for parm in p:
404 url += ";%s=%s" % (parm, p[parm])
405
406 return url
407
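# encodeurl() is the inverse of decodeurl(), so a tuple like the example
# above round-trips (a single parameter is used here since self.parm is a
# plain dict and parameter order is not guaranteed):
#
#   encodeurl(('git', 'git.example.com', '/repo.git', '', '',
#              {'protocol': 'https'}))
#   -> 'git://git.example.com/repo.git;protocol=https'
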
408def uri_replace(ud, uri_find, uri_replace, replacements, d):
409 if not ud.url or not uri_find or not uri_replace:
410 logger.error("uri_replace: passed an undefined value, not replacing")
411 return None
412 uri_decoded = list(decodeurl(ud.url))
413 uri_find_decoded = list(decodeurl(uri_find))
414 uri_replace_decoded = list(decodeurl(uri_replace))
415 logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
416 result_decoded = ['', '', '', '', '', {}]
417 for loc, i in enumerate(uri_find_decoded):
418 result_decoded[loc] = uri_decoded[loc]
419 regexp = i
420 if loc == 0 and regexp and not regexp.endswith("$"):
421 # Leaving the type unanchored can mean "https" matching "file" can become "files"
422 # which is clearly undesirable.
423 regexp += "$"
424 if loc == 5:
425 # Handle URL parameters
426 if i:
427 # Any specified URL parameters must match
428 for k in uri_find_decoded[loc]:
429 if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
430 return None
431 # Overwrite any specified replacement parameters
432 for k in uri_replace_decoded[loc]:
433 for l in replacements:
434 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
435 result_decoded[loc][k] = uri_replace_decoded[loc][k]
436 elif (re.match(regexp, uri_decoded[loc])):
437 if not uri_replace_decoded[loc]:
438 result_decoded[loc] = ""
439 else:
440 for k in replacements:
441 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
442 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
443 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
444 if loc == 2:
445 # Handle path manipulations
446 basename = None
447 if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
448 # If the source and destination url types differ, must be a mirrortarball mapping
449 basename = os.path.basename(ud.mirrortarball)
450 # Kill parameters, they make no sense for mirror tarballs
451 uri_decoded[5] = {}
452 elif ud.localpath and ud.method.supports_checksum(ud):
453 basename = os.path.basename(ud.localpath)
454 if basename and not result_decoded[loc].endswith(basename):
455 result_decoded[loc] = os.path.join(result_decoded[loc], basename)
456 else:
457 return None
458 result = encodeurl(result_decoded)
459 if result == ud.url:
460 return None
461 logger.debug(2, "For url %s returning %s" % (ud.url, result))
462 return result
463
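# How uri_replace() is used by the mirror support below: each PREMIRRORS or
# MIRRORS entry is a (find, replace) pair whose decoded components act as
# regexes and substitutions. A hypothetical premirror line
#
#   git://.*/.* http://mirror.example.com/sources/
#
# maps git://git.example.com/repo.git;protocol=https to a fetch from
# http://mirror.example.com/sources/, with the mirror tarball or file
# basename appended by the path handling above.
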
464methods = []
465urldata_cache = {}
466saved_headrevs = {}
467
468def fetcher_init(d):
469 """
470 Called to initialize the fetchers once the configuration data is known.
471 Calls before this must not hit the cache.
472 """
473 # When to drop SCM head revisions controlled by user policy
474 srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
475 if srcrev_policy == "cache":
476 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
477 elif srcrev_policy == "clear":
478 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
479 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
480 try:
481 bb.fetch2.saved_headrevs = revs.items()
482 except:
483 pass
484 revs.clear()
485 else:
486 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
487
488 _checksum_cache.init_cache(d)
489
490 for m in methods:
491 if hasattr(m, "init"):
492 m.init(d)
493
494def fetcher_parse_save(d):
495 _checksum_cache.save_extras(d)
496
497def fetcher_parse_done(d):
498 _checksum_cache.save_merge(d)
499
500def fetcher_compare_revisions(d):
501 """
502 Compare the revisions in the persistent cache with the current values and
503 return true/false on whether they've changed.
504 """
505
506 data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
507 data2 = dict(bb.fetch2.saved_headrevs)
508
509 for key in data:
510 if key not in data2 or data2[key] != data[key]:
511 logger.debug(1, "%s changed", key)
512 return True
513 else:
514 logger.debug(2, "%s did not change", key)
515 return False
518
519def mirror_from_string(data):
520 return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
521
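# For example, a hypothetical MIRRORS value (note the literal "\n" separators
# used in the metadata) decodes into (find, replace) pairs:
#
#   mirror_from_string("git://.*/.* http://mirror.example.com/sources/ \\n "
#                      "ftp://.*/.* http://mirror.example.com/sources/")
#   -> [['git://.*/.*', 'http://mirror.example.com/sources/'],
#       ['ftp://.*/.*', 'http://mirror.example.com/sources/']]
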
522def verify_checksum(ud, d):
523 """
524 Verify the MD5 and SHA256 checksums for the downloaded file
525
526 Raises a FetchError if one or both of the SRC_URI checksums do not match
527 the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
528 checksums specified.
529
530 """
531
532 if not ud.method.supports_checksum(ud):
533 return
534
535 md5data = bb.utils.md5_file(ud.localpath)
536 sha256data = bb.utils.sha256_file(ud.localpath)
537
538 if ud.method.recommends_checksum(ud):
539 # If strict checking enabled and neither sum defined, raise error
540 strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
541 if strict and not (ud.md5_expected or ud.sha256_expected):
542 logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
543 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
544 (ud.localpath, ud.md5_name, md5data,
545 ud.sha256_name, sha256data))
546 raise NoChecksumError('Missing SRC_URI checksum', ud.url)
547
548 # Log missing sums so user can more easily add them
549 if not ud.md5_expected:
550 logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
551 'SRC_URI[%s] = "%s"',
552 ud.localpath, ud.md5_name, md5data)
553
554 if not ud.sha256_expected:
555 logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
556 'SRC_URI[%s] = "%s"',
557 ud.localpath, ud.sha256_name, sha256data)
558
559 md5mismatch = False
560 sha256mismatch = False
561
562 if ud.md5_expected != md5data:
563 md5mismatch = True
564
565 if ud.sha256_expected != sha256data:
566 sha256mismatch = True
567
568 # We want to alert the user if a checksum is defined in the recipe but
569 # it does not match.
570 msg = ""
571 mismatch = False
572 if md5mismatch and ud.md5_expected:
573 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
574 mismatch = True
575
576 if sha256mismatch and ud.sha256_expected:
577 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
578 mismatch = True
579
580 if mismatch:
581 msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
582
583 if len(msg):
584 raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
585
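# The expected values checked above come from SRC_URI varflags in the recipe,
# e.g. (hypothetical sums, and a hypothetical entry named via ;name=tarball):
#
#   SRC_URI[md5sum] = "0123456789abcdef0123456789abcdef"
#   SRC_URI[sha256sum] = "aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f"
#   SRC_URI[tarball.md5sum] = "fedcba9876543210fedcba9876543210"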
586
587def update_stamp(ud, d):
588 """
589 donestamp is a stamp file indicating that fetching has completed;
590 this function updates the stamp after verifying the checksum
591 """
592 if os.path.exists(ud.donestamp):
593 # Touch the done stamp file to show active use of the download
594 try:
595 os.utime(ud.donestamp, None)
596 except:
597 # Errors aren't fatal here
598 pass
599 else:
600 verify_checksum(ud, d)
601 open(ud.donestamp, 'w').close()
602
603def subprocess_setup():
604 # Python installs a SIGPIPE handler by default. This is usually not what
605 # non-Python subprocesses expect.
606 # SIGPIPE errors are known issues with gzip/bash
607 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
608
609def get_autorev(d):
610 # Don't cache the parsed recipe in the autorev case unless the SRCREV policy is "cache"
611 if d.getVar('BB_SRCREV_POLICY', True) != "cache":
612 d.setVar('__BB_DONT_CACHE', '1')
613 return "AUTOINC"
614
615def get_srcrev(d):
616 """
617 Return the version string for the current package
618 (usually to be used as PV)
619 Most packages usually only have one SCM so we just pass on the call.
620 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
621 have been set.
622 """
623
624 scms = []
625 fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
626 urldata = fetcher.ud
627 for u in urldata:
628 if urldata[u].method.supports_srcrev():
629 scms.append(u)
630
631 if len(scms) == 0:
632 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
633
634 if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
635 autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
636 if len(rev) > 10:
637 rev = rev[:10]
638 if autoinc:
639 return "AUTOINC+" + rev
640 return rev
641
642 #
643 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
644 #
645 format = d.getVar('SRCREV_FORMAT', True)
646 if not format:
647 raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
648
649 seenautoinc = False
650 for scm in scms:
651 ud = urldata[scm]
652 for name in ud.names:
653 autoinc, rev = ud.method.sortable_revision(ud, d, name)
654 seenautoinc = seenautoinc or autoinc
655 if len(rev) > 10:
656 rev = rev[:10]
657 format = format.replace(name, rev)
658 if seenautoinc:
659 format = "AUTOINC+" + format
660
661 return format
662
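# A sketch of the multi-SCM case above, with hypothetical names matching the
# ;name= parameters in SRC_URI:
#
#   SRCREV_FORMAT = "machine_meta"
#   SRCREV_machine = "0123456789abcdef0123456789abcdef01234567"
#   SRCREV_meta = "fedcba9876543210fedcba9876543210fedcba98"
#
# get_srcrev() replaces each name with its revision (truncated to 10
# characters), yielding "0123456789_fedcba9876".
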
663def localpath(url, d):
664 fetcher = bb.fetch2.Fetch([url], d)
665 return fetcher.localpath(url)
666
667def runfetchcmd(cmd, d, quiet = False, cleanup = []):
668 """
669 Run cmd returning the command output
670 Raise an error if interrupted or cmd fails
671 Optionally echo command output to stdout
672 Optionally remove the files/directories listed in cleanup upon failure
673 """
674
675 # Need to export PATH as binary could be in metadata paths
676 # rather than host provided
677 # Also include some other variables.
678 # FIXME: Should we really include all exported variables?
679 exportvars = ['HOME', 'PATH',
680 'HTTP_PROXY', 'http_proxy',
681 'HTTPS_PROXY', 'https_proxy',
682 'FTP_PROXY', 'ftp_proxy',
683 'FTPS_PROXY', 'ftps_proxy',
684 'NO_PROXY', 'no_proxy',
685 'ALL_PROXY', 'all_proxy',
686 'GIT_PROXY_COMMAND',
687 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
688 'SOCKS5_USER', 'SOCKS5_PASSWD']
689
690 for var in exportvars:
691 val = d.getVar(var, True)
692 if val:
693 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
694
695 logger.debug(1, "Running %s", cmd)
696
697 success = False
698 error_message = ""
699
700 try:
701 (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
702 success = True
703 except bb.process.NotFoundError as e:
704 error_message = "Fetch command %s" % (e.command)
705 except bb.process.ExecutionError as e:
706 if e.stdout:
707 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
708 elif e.stderr:
709 output = "output:\n%s" % e.stderr
710 else:
711 output = "no output"
712 error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
713 except bb.process.CmdError as e:
714 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
715 if not success:
716 for f in cleanup:
717 try:
718 bb.utils.remove(f, True)
719 except OSError:
720 pass
721
722 raise FetchError(error_message)
723
724 return output
725
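# A minimal illustrative call (hypothetical command and cleanup path; assumes
# a populated datastore 'd'):
#
#   output = runfetchcmd("wget -t 2 -T 30 http://example.com/f.tar.gz", d,
#                        cleanup=["/path/to/downloads/f.tar.gz"])
#
# On success the command's stdout is returned; on failure the cleanup paths
# are removed and a FetchError is raised.
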
726def check_network_access(d, info = "", url = None):
727 """
728 log remote network access, and error if BB_NO_NETWORK is set
729 """
730 if d.getVar("BB_NO_NETWORK", True) == "1":
731 raise NetworkAccess(url, info)
732 else:
733 logger.debug(1, "Fetcher accessed the network with the command %s" % info)
734
735def build_mirroruris(origud, mirrors, ld):
736 uris = []
737 uds = []
738
739 replacements = {}
740 replacements["TYPE"] = origud.type
741 replacements["HOST"] = origud.host
742 replacements["PATH"] = origud.path
743 replacements["BASENAME"] = origud.path.split("/")[-1]
744 replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
745
746 def adduri(ud, uris, uds):
747 for line in mirrors:
748 try:
749 (find, replace) = line
750 except ValueError:
751 continue
752 newuri = uri_replace(ud, find, replace, replacements, ld)
753 if not newuri or newuri in uris or newuri == origud.url:
754 continue
755 try:
756 newud = FetchData(newuri, ld)
757 newud.setup_localpath(ld)
758 except bb.fetch2.BBFetchException as e:
759 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
760 logger.debug(1, str(e))
761 try:
762 ud.method.clean(ud, ld)
763 except UnboundLocalError:
764 pass
765 continue
766 uris.append(newuri)
767 uds.append(newud)
768
769 adduri(newud, uris, uds)
770
771 adduri(origud, uris, uds)
772
773 return uris, uds
774
775def rename_bad_checksum(ud, suffix):
776 """
777 Renames files to have suffix from parameter
778 """
779
780 if ud.localpath is None:
781 return
782
783 new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
784 bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
785 bb.utils.movefile(ud.localpath, new_localpath)
786
787
788def try_mirror_url(origud, ud, ld, check = False):
789 # Return of None or a value means we're finished
790 # False means try another url
791 try:
792 if check:
793 found = ud.method.checkstatus(ud, ld)
794 if found:
795 return found
796 return False
797
798 os.chdir(ld.getVar("DL_DIR", True))
799
800 if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
801 ud.method.download(ud, ld)
802 if hasattr(ud.method,"build_mirror_data"):
803 ud.method.build_mirror_data(ud, ld)
804
805 if not ud.localpath or not os.path.exists(ud.localpath):
806 return False
807
808 if ud.localpath == origud.localpath:
809 return ud.localpath
810
811 # We may be obtaining a mirror tarball which needs further processing by the real fetcher
812 # If that tarball is a local file:// we need to provide a symlink to it
813 dldir = ld.getVar("DL_DIR", True)
814 if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
815 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
816 bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
817 open(ud.donestamp, 'w').close()
818 dest = os.path.join(dldir, os.path.basename(ud.localpath))
819 if not os.path.exists(dest):
820 os.symlink(ud.localpath, dest)
821 if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
822 origud.method.download(origud, ld)
823 if hasattr(origud.method,"build_mirror_data"):
824 origud.method.build_mirror_data(origud, ld)
825 return ud.localpath
826 # Otherwise the result is a local file:// and we symlink to it
827 if not os.path.exists(origud.localpath):
828 if os.path.islink(origud.localpath):
829 # Broken symbolic link
830 os.unlink(origud.localpath)
831
832 os.symlink(ud.localpath, origud.localpath)
833 update_stamp(origud, ld)
834 return ud.localpath
835
836 except bb.fetch2.NetworkAccess:
837 raise
838
839 except bb.fetch2.BBFetchException as e:
840 if isinstance(e, ChecksumError):
841 logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
842 logger.warn(str(e))
843 rename_bad_checksum(ud, e.checksum)
844 elif isinstance(e, NoChecksumError):
845 raise
846 else:
847 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
848 logger.debug(1, str(e))
849 try:
850 ud.method.clean(ud, ld)
851 except UnboundLocalError:
852 pass
853 return False
854
855def try_mirrors(d, origud, mirrors, check = False):
856 """
857 Try to use a mirrored version of the sources.
858 This method will be automatically called before the fetchers go.
859
860 d is a bb.data instance
861 origud is the FetchData of the original uri we're trying to download
862 mirrors is the list of mirrors we're going to try
863 """
864 ld = d.createCopy()
865
866 uris, uds = build_mirroruris(origud, mirrors, ld)
867
868 for index, uri in enumerate(uris):
869 ret = try_mirror_url(origud, uds[index], ld, check)
870 if ret != False:
871 return ret
872 return None
873
874def srcrev_internal_helper(ud, d, name):
875 """
876 Return:
877 a) a source revision if specified
878 b) latest revision if SRCREV="AUTOINC"
879 c) None if not specified
880 """
881
882 srcrev = None
883 pn = d.getVar("PN", True)
884 attempts = []
885 if name != '' and pn:
886 attempts.append("SRCREV_%s_pn-%s" % (name, pn))
887 if name != '':
888 attempts.append("SRCREV_%s" % name)
889 if pn:
890 attempts.append("SRCREV_pn-%s" % pn)
891 attempts.append("SRCREV")
892
893 for a in attempts:
894 srcrev = d.getVar(a, True)
895 if srcrev and srcrev != "INVALID":
896 break
897
898 if 'rev' in ud.parm and 'tag' in ud.parm:
899 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
900
901 if 'rev' in ud.parm or 'tag' in ud.parm:
902 if 'rev' in ud.parm:
903 parmrev = ud.parm['rev']
904 else:
905 parmrev = ud.parm['tag']
906 if srcrev == "INVALID" or not srcrev:
907 return parmrev
908 if srcrev != parmrev:
909 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
910 return parmrev
911
912 if srcrev == "INVALID" or not srcrev:
913 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
914 if srcrev == "AUTOINC":
915 srcrev = ud.method.latest_revision(ud, d, name)
916
917 return srcrev
918
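# Lookup order sketch: for a recipe with PN = "foo" and a url carrying
# ;name=meta, the helper above consults, in order:
#
#   SRCREV_meta_pn-foo
#   SRCREV_meta
#   SRCREV_pn-foo
#   SRCREV
#
# The first value that is set and not "INVALID" wins.
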
919def get_checksum_file_list(d):
920 """ Get a list of files checksum in SRC_URI
921
922 Returns the resolved local paths of all local file entries in
923 SRC_URI as a space-separated string
924 """
925 fetch = Fetch([], d, cache = False, localonly = True)
926
927 dl_dir = d.getVar('DL_DIR', True)
928 filelist = []
929 for u in fetch.urls:
930 ud = fetch.ud[u]
931
932 if ud and isinstance(ud.method, local.Local):
933 ud.setup_localpath(d)
934 f = ud.localpath
935 pth = ud.decodedurl
936 if '*' in pth:
937 f = os.path.join(os.path.abspath(f), pth)
938 if f.startswith(dl_dir):
939 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
940 if os.path.exists(f):
941 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
942 else:
943 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
944 filelist.append(f)
945
946 return " ".join(filelist)
947
948
949def get_file_checksums(filelist, pn):
950 """Get a list of the checksums for a list of local files
951
952 Returns the checksums for a list of local files, caching the results as
953 it proceeds
954
955 """
956
957 def checksum_file(f):
958 try:
959 checksum = _checksum_cache.get_checksum(f)
960 except OSError as e:
961 bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
962 return None
963 return checksum
964
965 def checksum_dir(pth):
966 # Handle directories recursively
967 dirchecksums = []
968 for root, dirs, files in os.walk(pth):
969 for name in files:
970 fullpth = os.path.join(root, name)
971 checksum = checksum_file(fullpth)
972 if checksum:
973 dirchecksums.append((fullpth, checksum))
974 return dirchecksums
975
976 checksums = []
977 for pth in filelist.split():
978 checksum = None
979 if '*' in pth:
980 # Handle globs
981 for f in glob.glob(pth):
982 if os.path.isdir(f):
983 checksums.extend(checksum_dir(f))
984 else:
985 checksum = checksum_file(f)
986 if checksum:
987 checksums.append((f, checksum))
988 continue
989 elif os.path.isdir(pth):
990 checksums.extend(checksum_dir(pth))
991 continue
992 else:
993 checksum = checksum_file(pth)
994
995 if checksum:
996 checksums.append((pth, checksum))
997
998 checksums.sort(key=operator.itemgetter(1))
999 return checksums
1000
1001
1002class FetchData(object):
1003 """
1004 A class which represents the fetcher state for a given URI.
1005 """
1006 def __init__(self, url, d, localonly = False):
1007 # localpath is the location of a downloaded result. If not set, the file is local.
1008 self.donestamp = None
1009 self.localfile = ""
1010 self.localpath = None
1011 self.lockfile = None
1012 self.mirrortarball = None
1013 self.basename = None
1014 self.basepath = None
1015 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
1016 self.date = self.getSRCDate(d)
1017 self.url = url
1018 if not self.user and "user" in self.parm:
1019 self.user = self.parm["user"]
1020 if not self.pswd and "pswd" in self.parm:
1021 self.pswd = self.parm["pswd"]
1022 self.setup = False
1023
1024 if "name" in self.parm:
1025 self.md5_name = "%s.md5sum" % self.parm["name"]
1026 self.sha256_name = "%s.sha256sum" % self.parm["name"]
1027 else:
1028 self.md5_name = "md5sum"
1029 self.sha256_name = "sha256sum"
1030 if self.md5_name in self.parm:
1031 self.md5_expected = self.parm[self.md5_name]
1032 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1033 self.md5_expected = None
1034 else:
1035 self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
1036 if self.sha256_name in self.parm:
1037 self.sha256_expected = self.parm[self.sha256_name]
1038 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1039 self.sha256_expected = None
1040 else:
1041 self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
1042
1043 self.names = self.parm.get("name",'default').split(',')
1044
1045 self.method = None
1046 for m in methods:
1047 if m.supports(self, d):
1048 self.method = m
1049 break
1050
1051 if not self.method:
1052 raise NoMethodError(url)
1053
1054 if localonly and not isinstance(self.method, local.Local):
1055 raise NonLocalMethod()
1056
1057 if self.parm.get("proto", None) and "protocol" not in self.parm:
1058 logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
1059 self.parm["protocol"] = self.parm.get("proto", None)
1060
1061 if hasattr(self.method, "urldata_init"):
1062 self.method.urldata_init(self, d)
1063
1064 if "localpath" in self.parm:
1065 # if user sets localpath for file, use it instead.
1066 self.localpath = self.parm["localpath"]
1067 self.basename = os.path.basename(self.localpath)
1068 elif self.localfile:
1069 self.localpath = self.method.localpath(self, d)
1070
1071 dldir = d.getVar("DL_DIR", True)
1072 # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
1073 if self.localpath and self.localpath.startswith(dldir):
1074 basepath = self.localpath
1075 elif self.localpath:
1076 basepath = dldir + os.sep + os.path.basename(self.localpath)
1077 else:
1078 basepath = dldir + os.sep + (self.basepath or self.basename)
1079 self.donestamp = basepath + '.done'
1080 self.lockfile = basepath + '.lock'
1081
1082 def setup_revisons(self, d):
1083 self.revisions = {}
1084 for name in self.names:
1085 self.revisions[name] = srcrev_internal_helper(self, d, name)
1086
1087 # add compatibility code for the case where no name is specified
1088 if len(self.names) == 1:
1089 self.revision = self.revisions[self.names[0]]
1090
1091 def setup_localpath(self, d):
1092 if not self.localpath:
1093 self.localpath = self.method.localpath(self, d)
1094
1095 def getSRCDate(self, d):
1096 """
1097 Return the SRC Date for the component
1098
1099 d is the bb.data instance
1100 """
1101 if "srcdate" in self.parm:
1102 return self.parm['srcdate']
1103
1104 pn = d.getVar("PN", True)
1105
1106 if pn:
1107 return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1108
1109 return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1110
1111class FetchMethod(object):
1112 """Base class for 'fetch'ing data"""
1113
1114 def __init__(self, urls = []):
1115 self.urls = []
1116
1117 def supports(self, urldata, d):
1118 """
1119 Check to see if this fetch class supports a given url.
1120 """
1121 return 0
1122
1123 def localpath(self, urldata, d):
1124 """
1125 Return the local filename of a given url assuming a successful fetch.
1126 Can also set up variables in urldata for use in download() (saving code duplication
1127 and duplicate code execution)
1128 """
1129 return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
1130
1131 def supports_checksum(self, urldata):
1132 """
1133 Is localpath something that can be represented by a checksum?
1134 """
1135
1136 # We cannot compute checksums for directories
1137 if os.path.isdir(urldata.localpath):
1138 return False
1139 if urldata.localpath.find("*") != -1:
1140 return False
1141
1142 return True
1143
1144 def recommends_checksum(self, urldata):
1145 """
1146 Is this backend one for which checksumming is recommended (i.e. should
1147 warnings be displayed if there is no checksum)?
1148 """
1149 return False
1150
1151 def _strip_leading_slashes(self, relpath):
1152 """
1153 Remove leading slash as os.path.join can't cope
1154 """
1155 while os.path.isabs(relpath):
1156 relpath = relpath[1:]
1157 return relpath
1158
1159 def setUrls(self, urls):
1160 self.__urls = urls
1161
1162 def getUrls(self):
1163 return self.__urls
1164
1165 urls = property(getUrls, setUrls, None, "Urls property")
1166
1167 def need_update(self, ud, d):
1168 """
1169 Force a fetch, even if localpath exists?
1170 """
1171 if os.path.exists(ud.localpath):
1172 return False
1173 return True
1174
1175 def supports_srcrev(self):
1176 """
1177 The fetcher supports auto source revisions (SRCREV)
1178 """
1179 return False
1180
1181 def download(self, urldata, d):
1182 """
1183 Fetch urls
1184 Assumes localpath was called first
1185 """
1186 raise NoMethodError(urldata.url)
1187
1188 def unpack(self, urldata, rootdir, data):
1189 iterate = False
1190 file = urldata.localpath
1191
1192 try:
1193 unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1194 except ValueError as exc:
1195 bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1196 (file, urldata.parm.get('unpack')))
1197
1198 dots = file.split(".")
1199 if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
1200 efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
1201 else:
1202 efile = file
1203 cmd = None
1204
1205 if unpack:
1206 if file.endswith('.tar'):
1207 cmd = 'tar x --no-same-owner -f %s' % file
1208 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1209 cmd = 'tar xz --no-same-owner -f %s' % file
1210 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1211 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
1212 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1213 cmd = 'gzip -dc %s > %s' % (file, efile)
1214 elif file.endswith('.bz2'):
1215 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1216 elif file.endswith('.tar.xz'):
1217 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
1218 elif file.endswith('.xz'):
1219 cmd = 'xz -dc %s > %s' % (file, efile)
1220 elif file.endswith('.zip') or file.endswith('.jar'):
1221 try:
1222 dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1223 except ValueError as exc:
1224 bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1225 (file, urldata.parm.get('dos')))
1226 cmd = 'unzip -q -o'
1227 if dos:
1228 cmd = '%s -a' % cmd
1229 cmd = "%s '%s'" % (cmd, file)
1230 elif file.endswith('.rpm') or file.endswith('.srpm'):
1231 if 'extract' in urldata.parm:
1232 unpack_file = urldata.parm.get('extract')
1233 cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1234 iterate = True
1235 iterate_file = unpack_file
1236 else:
1237 cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1238 elif file.endswith('.deb') or file.endswith('.ipk'):
1239 cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
1240
1241 if not unpack or not cmd:
1242 # If file == dest, then avoid any copies, as we already put the file into dest!
1243 dest = os.path.join(rootdir, os.path.basename(file))
1244 if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1245 if os.path.isdir(file):
1246 # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
1247 basepath = getattr(urldata, "basepath", None)
1248 destdir = "."
1249 if basepath and basepath.endswith("/"):
1250 basepath = basepath.rstrip("/")
1251 elif basepath:
1252 basepath = os.path.dirname(basepath)
1253 if basepath and basepath.find("/") != -1:
1254 destdir = basepath[:basepath.rfind('/')]
1255 destdir = destdir.strip('/')
1256 if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
1257 os.makedirs("%s/%s" % (rootdir, destdir))
1258 cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
1259 #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
1260 else:
1261 # The "destdir" handling was specifically done for FILESPATH
1262 # items. So, only do so for file:// entries.
1263 if urldata.type == "file" and urldata.path.find("/") != -1:
1264 destdir = urldata.path.rsplit("/", 1)[0]
1265 else:
1266 destdir = "."
1267 bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
1268 cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
1269
1270 if not cmd:
1271 return
1272
1273 # Change to subdir before executing command
1274 save_cwd = os.getcwd()
1275 os.chdir(rootdir)
1276 if 'subdir' in urldata.parm:
1277 newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
1278 bb.utils.mkdirhier(newdir)
1279 os.chdir(newdir)
1280
1281 path = data.getVar('PATH', True)
1282 if path:
1283 cmd = "PATH=\"%s\" %s" % (path, cmd)
1284 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
1285 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
1286
1287 os.chdir(save_cwd)
1288
1289 if ret != 0:
1290 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1291
1292 if iterate is True:
1293 iterate_urldata = urldata
1294 iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1295 self.unpack(urldata, rootdir, data)
1296
1297 return
1298
1299 def clean(self, urldata, d):
1300 """
1301 Clean any existing full or partial download
1302 """
1303 bb.utils.remove(urldata.localpath)
1304
1305 def try_premirror(self, urldata, d):
1306 """
1307 Should premirrors be used?
1308 """
1309 return True
1310
1311 def checkstatus(self, urldata, d):
1312 """
1313 Check the status of a URL
1314 Assumes localpath was called first
1315 """
1316 logger.info("URL %s could not be checked for status since no method exists.", url)
1317 return True
1318
1319 def latest_revision(self, ud, d, name):
1320 """
1321 Look in the cache for the latest revision, if not present ask the SCM.
1322 """
1323 if not hasattr(self, "_latest_revision"):
1324 raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
1325
1326 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1327 key = self.generate_revision_key(ud, d, name)
1328 try:
1329 return revs[key]
1330 except KeyError:
1331 revs[key] = rev = self._latest_revision(ud, d, name)
1332 return rev
1333
1334 def sortable_revision(self, ud, d, name):
1335 latest_rev = self._build_revision(ud, d, name)
1336 return True, str(latest_rev)
1337
1338 def generate_revision_key(self, ud, d, name):
1339 key = self._revision_key(ud, d, name)
1340 return "%s-%s" % (key, d.getVar("PN", True) or "")
1341
1342class Fetch(object):
1343 def __init__(self, urls, d, cache = True, localonly = False):
1344 if localonly and cache:
1345 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1346
1347 if len(urls) == 0:
1348 urls = d.getVar("SRC_URI", True).split()
1349 self.urls = urls
1350 self.d = d
1351 self.ud = {}
1352
1353 fn = d.getVar('FILE', True)
1354 if cache and fn and fn in urldata_cache:
1355 self.ud = urldata_cache[fn]
1356
1357 for url in urls:
1358 if url not in self.ud:
1359 try:
1360 self.ud[url] = FetchData(url, d, localonly)
1361 except NonLocalMethod:
1362 if localonly:
1363 self.ud[url] = None
1364 pass
1365
1366 if fn and cache:
1367 urldata_cache[fn] = self.ud
1368
1369 def localpath(self, url):
1370 if url not in self.urls:
1371 self.ud[url] = FetchData(url, self.d)
1372
1373 self.ud[url].setup_localpath(self.d)
1374 return self.d.expand(self.ud[url].localpath)
1375
1376 def localpaths(self):
1377 """
1378 Return a list of the local filenames, assuming successful fetch
1379 """
1380 local = []
1381
1382 for u in self.urls:
1383 ud = self.ud[u]
1384 ud.setup_localpath(self.d)
1385 local.append(ud.localpath)
1386
1387 return local
1388
1389 def download(self, urls = []):
1390 """
1391 Fetch all urls
1392 """
1393 if len(urls) == 0:
1394 urls = self.urls
1395
1396 network = self.d.getVar("BB_NO_NETWORK", True)
1397 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
1398
1399 for u in urls:
1400 ud = self.ud[u]
1401 ud.setup_localpath(self.d)
1402 m = ud.method
1403 localpath = ""
1404
1405 lf = bb.utils.lockfile(ud.lockfile)
1406
1407 try:
1408 self.d.setVar("BB_NO_NETWORK", network)
1409
1410 if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
1411 localpath = ud.localpath
1412 elif m.try_premirror(ud, self.d):
1413 logger.debug(1, "Trying PREMIRRORS")
1414 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1415 localpath = try_mirrors(self.d, ud, mirrors, False)
1416
1417 if premirroronly:
1418 self.d.setVar("BB_NO_NETWORK", "1")
1419
1420 os.chdir(self.d.getVar("DL_DIR", True))
1421
1422 firsterr = None
1423 if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
1424 try:
1425 logger.debug(1, "Trying Upstream")
1426 m.download(ud, self.d)
1427 if hasattr(m, "build_mirror_data"):
1428 m.build_mirror_data(ud, self.d)
1429 localpath = ud.localpath
1430 # Verify the checksum early so that, if it mismatches, the
1431 # fetcher still has a chance to fetch from a mirror
1432 update_stamp(ud, self.d)
1433
1434 except bb.fetch2.NetworkAccess:
1435 raise
1436
1437 except BBFetchException as e:
1438 if isinstance(e, ChecksumError):
1439 logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
1440 logger.debug(1, str(e))
1441 rename_bad_checksum(ud, e.checksum)
1442 elif isinstance(e, NoChecksumError):
1443 raise
1444 else:
1445 logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
1446 logger.debug(1, str(e))
1447 firsterr = e
1448 # Remove any incomplete fetch
1449 m.clean(ud, self.d)
1450 logger.debug(1, "Trying MIRRORS")
1451 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1452 localpath = try_mirrors(self.d, ud, mirrors)
1453
1454 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
1455 if firsterr:
1456 logger.error(str(firsterr))
1457 raise FetchError("Unable to fetch URL from any source.", u)
1458
1459 update_stamp(ud, self.d)
1460
1461 except BBFetchException as e:
1462 if isinstance(e, ChecksumError):
1463 logger.error("Checksum failure fetching %s" % u)
1464 raise
1465
1466 finally:
1467 bb.utils.unlockfile(lf)
1468
1469 def checkstatus(self, urls = []):
1470 """
1471 Check all urls exist upstream
1472 """
1473
1474 if len(urls) == 0:
1475 urls = self.urls
1476
1477 for u in urls:
1478 ud = self.ud[u]
1479 ud.setup_localpath(self.d)
1480 m = ud.method
1481 logger.debug(1, "Testing URL %s", u)
1482 # First try checking uri, u, from PREMIRRORS
1483 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1484 ret = try_mirrors(self.d, ud, mirrors, True)
1485 if not ret:
1486 # Next try checking from the original uri, u
1487 try:
1488 ret = m.checkstatus(ud, self.d)
1489 except:
1490 # Finally, try checking uri, u, from MIRRORS
1491 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1492 ret = try_mirrors(self.d, ud, mirrors, True)
1493
1494 if not ret:
1495 raise FetchError("URL %s doesn't work" % u, u)
1496
1497 def unpack(self, root, urls = []):
1498 """
1499 Unpack all urls into the directory root
1500 """
1501
1502 if len(urls) == 0:
1503 urls = self.urls
1504
1505 for u in urls:
1506 ud = self.ud[u]
1507 ud.setup_localpath(self.d)
1508
1509 if self.d.expand(ud.localpath) is None:
1510 continue
1511
1512 if ud.lockfile:
1513 lf = bb.utils.lockfile(ud.lockfile)
1514
1515 ud.method.unpack(ud, root, self.d)
1516
1517 if ud.lockfile:
1518 bb.utils.unlockfile(lf)
1519
1520 def clean(self, urls = []):
1521 """
1522 Clean files that the fetcher gets or places
1523 """
1524
1525 if len(urls) == 0:
1526 urls = self.urls
1527
1528 for url in urls:
1529 if url not in self.ud:
1530 self.ud[url] = FetchData(url, self.d)
1531 ud = self.ud[url]
1532 ud.setup_localpath(self.d)
1533
1534 if not ud.localfile and ud.localpath is None:
1535 continue
1536
1537 if ud.lockfile:
1538 lf = bb.utils.lockfile(ud.lockfile)
1539
1540 ud.method.clean(ud, self.d)
1541 if ud.donestamp:
1542 bb.utils.remove(ud.donestamp)
1543
1544 if ud.lockfile:
1545 bb.utils.unlockfile(lf)
1546
1547from . import cvs
1548from . import git
1549from . import gitsm
1550from . import gitannex
1551from . import local
1552from . import svn
1553from . import wget
1554from . import ssh
1555from . import sftp
1556from . import perforce
1557from . import bzr
1558from . import hg
1559from . import osc
1560from . import repo
1561
1562methods.append(local.Local())
1563methods.append(wget.Wget())
1564methods.append(svn.Svn())
1565methods.append(git.Git())
1566methods.append(gitsm.GitSM())
1567methods.append(gitannex.GitANNEX())
1568methods.append(cvs.Cvs())
1569methods.append(ssh.SSH())
1570methods.append(sftp.SFTP())
1571methods.append(perforce.Perforce())
1572methods.append(bzr.Bzr())
1573methods.append(hg.Hg())
1574methods.append(osc.Osc())
1575methods.append(repo.Repo())
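
# Typical use of this module from metadata handling code, as a hedged sketch
# (assumes a populated datastore 'd' and a writable DL_DIR; 'workdir' is
# hypothetical):
#
#   fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
#   fetcher.download()        # try PREMIRRORS, then upstream, then MIRRORS
#   fetcher.unpack(workdir)   # extract each url into the work directory
#   paths = fetcher.localpaths()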