Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 1585
1 file changed, 1585 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..378d41e1cb
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1585 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
43__version__ = "2"
44_checksum_cache = bb.checksum.FileChecksumCache()
45
46logger = logging.getLogger("BitBake.Fetcher")
47
48class BBFetchException(Exception):
49 """Class all fetch exceptions inherit from"""
50 def __init__(self, message):
51 self.msg = message
52 Exception.__init__(self, message)
53
54 def __str__(self):
55 return self.msg
56
57class MalformedUrl(BBFetchException):
58 """Exception raised when encountering an invalid url"""
59 def __init__(self, url, message=''):
60 if message:
61 msg = message
62 else:
63 msg = "The URL: '%s' is invalid and cannot be interpreted" % url
64 self.url = url
65 BBFetchException.__init__(self, msg)
66 self.args = (url,)
67
68class FetchError(BBFetchException):
69 """General fetcher exception when something happens incorrectly"""
70 def __init__(self, message, url = None):
71 if url:
72 msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
73 else:
74 msg = "Fetcher failure: %s" % message
75 self.url = url
76 BBFetchException.__init__(self, msg)
77 self.args = (message, url)
78
79class ChecksumError(FetchError):
80 """Exception when mismatched checksum encountered"""
81 def __init__(self, message, url = None, checksum = None):
82 self.checksum = checksum
83 FetchError.__init__(self, message, url)
84
85class NoChecksumError(FetchError):
86 """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
87
88class UnpackError(BBFetchException):
89 """General fetcher exception when something happens incorrectly when unpacking"""
90 def __init__(self, message, url):
91 msg = "Unpack failure for URL: '%s'. %s" % (url, message)
92 self.url = url
93 BBFetchException.__init__(self, msg)
94 self.args = (message, url)
95
96class NoMethodError(BBFetchException):
97 """Exception raised when there is no method to obtain a supplied url or set of urls"""
98 def __init__(self, url):
99 msg = "Could not find a fetcher which supports the URL: '%s'" % url
100 self.url = url
101 BBFetchException.__init__(self, msg)
102 self.args = (url,)
103
104class MissingParameterError(BBFetchException):
105 """Exception raised when a fetch method is missing a critical parameter in the url"""
106 def __init__(self, missing, url):
107 msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
108 self.url = url
109 self.missing = missing
110 BBFetchException.__init__(self, msg)
111 self.args = (missing, url)
112
113class ParameterError(BBFetchException):
114 """Exception raised when a url cannot be proccessed due to invalid parameters."""
115 def __init__(self, message, url):
116 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
117 self.url = url
118 BBFetchException.__init__(self, msg)
119 self.args = (message, url)
120
121class NetworkAccess(BBFetchException):
122 """Exception raised when network access is disabled but it is required."""
123 def __init__(self, url, cmd):
124 msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
125 self.url = url
126 self.cmd = cmd
127 BBFetchException.__init__(self, msg)
128 self.args = (url, cmd)
129
130class NonLocalMethod(Exception):
131 def __init__(self):
132 Exception.__init__(self)
133
134
135class URI(object):
136 """
137 A class representing a generic URI, with methods for
138 accessing the URI components, and stringifies to the
139 URI.
140
141 It is constructed by calling it with a URI, or setting
142 the attributes manually:
143
144 uri = URI("http://example.com/")
145
146 uri = URI()
147 uri.scheme = 'http'
148 uri.hostname = 'example.com'
149 uri.path = '/'
150
151 It has the following attributes:
152
153 * scheme (read/write)
154 * userinfo (authentication information) (read/write)
155 * username (read/write)
156 * password (read/write)
157
158 Note, password is deprecated as of RFC 3986.
159
160 * hostname (read/write)
161 * port (read/write)
162 * hostport (read only)
163 "hostname:port", if both are set, otherwise just "hostname"
164 * path (read/write)
165 * path_quoted (read/write)
166 A URI quoted version of path
167 * params (dict) (read/write)
168 * query (dict) (read/write)
169 * relative (bool) (read only)
170 True if this is a "relative URI", (e.g. file:foo.diff)
171
172 It stringifies to the URI itself.
173
174 Some notes about relative URIs: while it's specified that
175 a URI beginning with <scheme>:// should either be directly
176 followed by a hostname or a /, the old URI handling of the
177 fetch2 library did not conform to this. Therefore, this URI
178 class has some kludges to make sure that URIs are parsed in
179 a way conforming to bitbake's current usage. This URI class
180 supports the following:
181
182 file:relative/path.diff (IETF compliant)
183 git:relative/path.git (IETF compliant)
184 git:///absolute/path.git (IETF compliant)
185 file:///absolute/path.diff (IETF compliant)
186
187 file://relative/path.diff (not IETF compliant)
188
189 But it does not support the following:
190
191 file://hostname/absolute/path.diff (would be IETF compliant)
192
193 Note that the last case only applies to a list of
194 "whitelisted" schemes (currently only file://), which requires
195 its URIs to not have a network location.
196 """
197
198 _relative_schemes = ['file', 'git']
199 _netloc_forbidden = ['file']
200
201 def __init__(self, uri=None):
202 self.scheme = ''
203 self.userinfo = ''
204 self.hostname = ''
205 self.port = None
206 self._path = ''
207 self.params = {}
208 self.query = {}
209 self.relative = False
210
211 if not uri:
212 return
213
214 # We hijack the URL parameters, since the way bitbake uses
215 # them is not quite RFC compliant.
216 uri, param_str = (uri.split(";", 1) + [None])[:2]
217
218 urlp = urlparse.urlparse(uri)
219 self.scheme = urlp.scheme
220
221 reparse = 0
222
223 # Coerce urlparse to make URI scheme use netloc
224 if not self.scheme in urlparse.uses_netloc:
225 urlparse.uses_netloc.append(self.scheme)
226 reparse = 1
227
228 # Make urlparse happy(/ier) by converting local resources
229 # to RFC compliant URL format. E.g.:
230 # file://foo.diff -> file:foo.diff
231 if urlp.scheme in self._netloc_forbidden:
232 uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
233 reparse = 1
234
235 if reparse:
236 urlp = urlparse.urlparse(uri)
237
238 # Identify if the URI is relative or not
239 if urlp.scheme in self._relative_schemes and \
240 re.compile("^\w+:(?!//)").match(uri):
241 self.relative = True
242
243 if not self.relative:
244 self.hostname = urlp.hostname or ''
245 self.port = urlp.port
246
247 self.userinfo += urlp.username or ''
248
249 if urlp.password:
250 self.userinfo += ':%s' % urlp.password
251
252 self.path = urllib.unquote(urlp.path)
253
254 if param_str:
255 self.params = self._param_str_split(param_str, ";")
256 if urlp.query:
257 self.query = self._param_str_split(urlp.query, "&")
258
259 def __str__(self):
260 userinfo = self.userinfo
261 if userinfo:
262 userinfo += '@'
263
264 return "%s:%s%s%s%s%s%s" % (
265 self.scheme,
266 '' if self.relative else '//',
267 userinfo,
268 self.hostport,
269 self.path_quoted,
270 self._query_str(),
271 self._param_str())
272
273 def _param_str(self):
274 return (
275 ''.join([';', self._param_str_join(self.params, ";")])
276 if self.params else '')
277
278 def _query_str(self):
279 return (
280 ''.join(['?', self._param_str_join(self.query, "&")])
281 if self.query else '')
282
283 def _param_str_split(self, string, elmdelim, kvdelim="="):
284 ret = {}
285 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
286 ret[k] = v
287 return ret
288
289 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
290 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
291
292 @property
293 def hostport(self):
294 if not self.port:
295 return self.hostname
296 return "%s:%d" % (self.hostname, self.port)
297
298 @property
299 def path_quoted(self):
300 return urllib.quote(self.path)
301
302 @path_quoted.setter
303 def path_quoted(self, path):
304 self.path = urllib.unquote(path)
305
306 @property
307 def path(self):
308 return self._path
309
310 @path.setter
311 def path(self, path):
312 self._path = path
313
314 if re.compile("^/").match(path):
315 self.relative = False
316 else:
317 self.relative = True
318
319 @property
320 def username(self):
321 if self.userinfo:
322 return (self.userinfo.split(":", 1))[0]
323 return ''
324
325 @username.setter
326 def username(self, username):
327 password = self.password
328 self.userinfo = username
329 if password:
330 self.userinfo += ":%s" % password
331
332 @property
333 def password(self):
334 if self.userinfo and ":" in self.userinfo:
335 return (self.userinfo.split(":", 1))[1]
336 return ''
337
338 @password.setter
339 def password(self, password):
340 self.userinfo = "%s:%s" % (self.username, password)
341
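# A minimal usage sketch (illustrative, not part of the original module);
# the URL and port below are hypothetical:
#
#     u = URI("http://www.example.com/downloads/foo.tar.gz")
#     u.scheme     # -> 'http'
#     u.hostname   # -> 'www.example.com'
#     u.path       # -> '/downloads/foo.tar.gz'
#     u.port = 8080
#     str(u)       # -> 'http://www.example.com:8080/downloads/foo.tar.gz'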
342def decodeurl(url):
343 """Decodes an URL into the tokens (scheme, network location, path,
344 user, password, parameters).
345 """
346
347 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
348 if not m:
349 raise MalformedUrl(url)
350
351 type = m.group('type')
352 location = m.group('location')
353 if not location:
354 raise MalformedUrl(url)
355 user = m.group('user')
356 parm = m.group('parm')
357
358 locidx = location.find('/')
359 if locidx != -1 and type.lower() != 'file':
360 host = location[:locidx]
361 path = location[locidx:]
362 else:
363 host = ""
364 path = location
365 if user:
366 m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
367 if m:
368 user = m.group('user')
369 pswd = m.group('pswd')
370 else:
371 user = ''
372 pswd = ''
373
374 p = {}
375 if parm:
376 for s in parm.split(';'):
377 if s:
378 if '=' not in s:
379 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
380 s1, s2 = s.split('=', 1)
381 p[s1] = s2
382
383 return type, host, urllib.unquote(path), user, pswd, p
384
385def encodeurl(decoded):
386 """Encodes a URL from tokens (scheme, network location, path,
387 user, password, parameters).
388 """
389
390 type, host, path, user, pswd, p = decoded
391
392 if not path:
393 raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
394 if not type:
395 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
396 url = '%s://' % type
397 if user and type != "file":
398 url += "%s" % user
399 if pswd:
400 url += ":%s" % pswd
401 url += "@"
402 if host and type != "file":
403 url += "%s" % host
404 # Standardise path to ensure comparisons work
405 while '//' in path:
406 path = path.replace("//", "/")
407 url += "%s" % urllib.quote(path)
408 if p:
409 for parm in p:
410 url += ";%s=%s" % (parm, p[parm])
411
412 return url
413
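# A minimal sketch (illustrative, not part of the original module) of the
# decodeurl/encodeurl round-trip; the URL is hypothetical:
#
#     decodeurl("git://git.example.com/repo.git;branch=master")
#     # -> ('git', 'git.example.com', '/repo.git', '', '', {'branch': 'master'})
#
#     encodeurl(('git', 'git.example.com', '/repo.git', '', '', {'branch': 'master'}))
#     # -> 'git://git.example.com/repo.git;branch=master'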
414def uri_replace(ud, uri_find, uri_replace, replacements, d):
415 if not ud.url or not uri_find or not uri_replace:
416 logger.error("uri_replace: passed an undefined value, not replacing")
417 return None
418 uri_decoded = list(decodeurl(ud.url))
419 uri_find_decoded = list(decodeurl(uri_find))
420 uri_replace_decoded = list(decodeurl(uri_replace))
421 logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
422 result_decoded = ['', '', '', '', '', {}]
423 for loc, i in enumerate(uri_find_decoded):
424 result_decoded[loc] = uri_decoded[loc]
425 regexp = i
426 if loc == 0 and regexp and not regexp.endswith("$"):
427 # Leaving the type unanchored can mean "https" matching "file" can become "files"
428 # which is clearly undesirable.
429 regexp += "$"
430 if loc == 5:
431 # Handle URL parameters
432 if i:
433 # Any specified URL parameters must match
434 for k in uri_replace_decoded[loc]:
435 if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
436 return None
437 # Overwrite any specified replacement parameters
438 for k in uri_replace_decoded[loc]:
439 for l in replacements:
440 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
441 result_decoded[loc][k] = uri_replace_decoded[loc][k]
442 elif (re.match(regexp, uri_decoded[loc])):
443 if not uri_replace_decoded[loc]:
444 result_decoded[loc] = ""
445 else:
446 for k in replacements:
447 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
448 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
449 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
450 if loc == 2:
451 # Handle path manipulations
452 basename = None
453 if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
454 # If the source and destination url types differ, must be a mirrortarball mapping
455 basename = os.path.basename(ud.mirrortarball)
456 # Kill parameters, they make no sense for mirror tarballs
457 uri_decoded[5] = {}
458 elif ud.localpath and ud.method.supports_checksum(ud):
459 basename = os.path.basename(ud.localpath)
460 if basename and not result_decoded[loc].endswith(basename):
461 result_decoded[loc] = os.path.join(result_decoded[loc], basename)
462 else:
463 return None
464 result = encodeurl(result_decoded)
465 if result == ud.url:
466 return None
467 logger.debug(2, "For url %s returning %s" % (ud.url, result))
468 return result
469
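# Illustrative note (not part of the original module): uri_replace() is driven
# by mirror entries of the form "<find regex> <replacement>". The replacement
# may use the substitutions set up in build_mirroruris() below (TYPE, HOST,
# PATH, BASENAME, MIRRORNAME). A hypothetical PREMIRRORS entry such as
#
#     git://.*/.*   http://downloads.example.com/mirror/BASENAME
#
# would redirect any git URL to a file fetched from the mirror host.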
470methods = []
471urldata_cache = {}
472saved_headrevs = {}
473
474def fetcher_init(d):
475 """
476 Called to initialize the fetchers once the configuration data is known.
477 Calls before this must not hit the cache.
478 """
479 # When to drop SCM head revisions is controlled by user policy
480 srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
481 if srcrev_policy == "cache":
482 logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
483 elif srcrev_policy == "clear":
484 logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
485 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
486 try:
487 bb.fetch2.saved_headrevs = revs.items()
488 except:
489 pass
490 revs.clear()
491 else:
492 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
493
494 _checksum_cache.init_cache(d)
495
496 for m in methods:
497 if hasattr(m, "init"):
498 m.init(d)
499
500def fetcher_parse_save(d):
501 _checksum_cache.save_extras(d)
502
503def fetcher_parse_done(d):
504 _checksum_cache.save_merge(d)
505
506def fetcher_compare_revisions(d):
507 """
508 Compare the revisions in the persistent cache with the current values and
509 return true/false on whether they've changed.
510 """
511
512 data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
513 data2 = dict(bb.fetch2.saved_headrevs)
514
515 changed = False
516 for key in data:
517 if key not in data2 or data2[key] != data[key]:
518 logger.debug(1, "%s changed", key)
519 changed = True
520 break
521 else:
522 logger.debug(2, "%s did not change", key)
523 return changed
524
525def mirror_from_string(data):
526 return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
527
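# A minimal sketch (illustrative, not part of the original module); the mirror
# hosts are hypothetical:
#
#     mirror_from_string("ftp://.*/.* http://www.example.com/sources/ \\n "
#                        "git://.*/.* http://www.example.com/mirror/")
#     # -> [['ftp://.*/.*', 'http://www.example.com/sources/'],
#     #     ['git://.*/.*', 'http://www.example.com/mirror/']]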
528def verify_checksum(ud, d):
529 """
530 Verify the MD5 and SHA256 checksums for the downloaded file.
531
532 Raises a FetchError if one or both of the SRC_URI checksums do not match
533 the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
534 checksums specified.
535
536 """
537
538 if not ud.method.supports_checksum(ud):
539 return
540
541 md5data = bb.utils.md5_file(ud.localpath)
542 sha256data = bb.utils.sha256_file(ud.localpath)
543
544 if ud.method.recommends_checksum(ud):
545 # If strict checking enabled and neither sum defined, raise error
546 strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
547 if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
548 logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
549 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
550 (ud.localpath, ud.md5_name, md5data,
551 ud.sha256_name, sha256data))
552 raise NoChecksumError('Missing SRC_URI checksum', ud.url)
553
554 # Log missing sums so user can more easily add them
555 if not ud.md5_expected:
556 logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
557 'SRC_URI[%s] = "%s"',
558 ud.localpath, ud.md5_name, md5data)
559
560 if not ud.sha256_expected:
561 logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
562 'SRC_URI[%s] = "%s"',
563 ud.localpath, ud.sha256_name, sha256data)
564
565 md5mismatch = False
566 sha256mismatch = False
567
568 if ud.md5_expected != md5data:
569 md5mismatch = True
570
571 if ud.sha256_expected != sha256data:
572 sha256mismatch = True
573
574 # We want to alert the user if a checksum is defined in the recipe but
575 # it does not match.
576 msg = ""
577 mismatch = False
578 if md5mismatch and ud.md5_expected:
579 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
580 mismatch = True
581
582 if sha256mismatch and ud.sha256_expected:
583 msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
584 mismatch = True
585
586 if mismatch:
587 msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
588
589 if len(msg):
590 raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
591
592
593def update_stamp(ud, d):
594 """
595 donestamp is a stamp file indicating that the whole fetch is done;
596 this function updates the stamp after verifying the checksum
597 """
598 if os.path.exists(ud.donestamp):
599 # Touch the done stamp file to show active use of the download
600 try:
601 os.utime(ud.donestamp, None)
602 except:
603 # Errors aren't fatal here
604 pass
605 else:
606 verify_checksum(ud, d)
607 open(ud.donestamp, 'w').close()
608
609def subprocess_setup():
610 # Python installs a SIGPIPE handler by default. This is usually not what
611 # non-Python subprocesses expect.
612 # SIGPIPE errors are known issues with gzip/bash
613 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
614
615def get_autorev(d):
616 # Don't cache the source revision in the autorev case
617 if d.getVar('BB_SRCREV_POLICY', True) != "cache":
618 d.setVar('__BB_DONT_CACHE', '1')
619 return "AUTOINC"
620
621def get_srcrev(d):
622 """
623 Return the version string for the current package
624 (usually to be used as PV)
625 Most packages usually only have one SCM so we just pass on the call.
626 In the multi SCM case, we build a value based on SRCREV_FORMAT which must
627 have been set.
628 """
629
630 scms = []
631 fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
632 urldata = fetcher.ud
633 for u in urldata:
634 if urldata[u].method.supports_srcrev():
635 scms.append(u)
636
637 if len(scms) == 0:
638 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
639
640 if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
641 autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
642 if len(rev) > 10:
643 rev = rev[:10]
644 if autoinc:
645 return "AUTOINC+" + rev
646 return rev
647
648 #
649 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
650 #
651 format = d.getVar('SRCREV_FORMAT', True)
652 if not format:
653 raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
654
655 seenautoinc = False
656 for scm in scms:
657 ud = urldata[scm]
658 for name in ud.names:
659 autoinc, rev = ud.method.sortable_revision(ud, d, name)
660 seenautoinc = seenautoinc or autoinc
661 if len(rev) > 10:
662 rev = rev[:10]
663 format = format.replace(name, rev)
664 if seenautoinc:
665 format = "AUTOINC+" + format
666
667 return format
668
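# Illustrative note (not part of the original module): with two named SCMs in
# SRC_URI, e.g. ;name=machine and ;name=meta (hypothetical names), a recipe
# would set
#
#     SRCREV_FORMAT = "machine_meta"
#
# and get_srcrev() replaces each name with its revision (truncated to 10
# characters), prefixing the result with "AUTOINC+" if any SCM used AUTOREV.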
669def localpath(url, d):
670 fetcher = bb.fetch2.Fetch([url], d)
671 return fetcher.localpath(url)
672
673def runfetchcmd(cmd, d, quiet = False, cleanup = None):
674 """
675 Run cmd returning the command output
676 Raise an error if interrupted or cmd fails
677 Optionally echo command output to stdout
678 Optionally remove the files/directories listed in cleanup upon failure
679 """
680
681 # Need to export PATH as the binary could be in metadata paths
682 # rather than host-provided ones
683 # Also include some other variables.
684 # FIXME: Should this really include all exported variables?
685 exportvars = ['HOME', 'PATH',
686 'HTTP_PROXY', 'http_proxy',
687 'HTTPS_PROXY', 'https_proxy',
688 'FTP_PROXY', 'ftp_proxy',
689 'FTPS_PROXY', 'ftps_proxy',
690 'NO_PROXY', 'no_proxy',
691 'ALL_PROXY', 'all_proxy',
692 'GIT_PROXY_COMMAND',
693 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
694 'SOCKS5_USER', 'SOCKS5_PASSWD']
695
696 for var in exportvars:
697 val = d.getVar(var, True)
698 if val:
699 cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
700
701 logger.debug(1, "Running %s", cmd)
702
703 success = False
704 error_message = ""
705
706 try:
707 (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
708 success = True
709 except bb.process.NotFoundError as e:
710 error_message = "Fetch command %s" % (e.command)
711 except bb.process.ExecutionError as e:
712 if e.stdout:
713 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
714 elif e.stderr:
715 output = "output:\n%s" % e.stderr
716 else:
717 output = "no output"
718 error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
719 except bb.process.CmdError as e:
720 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
721 if not success:
722 for f in cleanup or []:
723 try:
724 bb.utils.remove(f, True)
725 except OSError:
726 pass
727
728 raise FetchError(error_message)
729
730 return output
731
732def check_network_access(d, info = "", url = None):
733 """
734 log remote network access, and error if BB_NO_NETWORK is set
735 """
736 if d.getVar("BB_NO_NETWORK", True) == "1":
737 raise NetworkAccess(url, info)
738 else:
739 logger.debug(1, "Fetcher accessed the network with the command %s" % info)
740
741def build_mirroruris(origud, mirrors, ld):
742 uris = []
743 uds = []
744
745 replacements = {}
746 replacements["TYPE"] = origud.type
747 replacements["HOST"] = origud.host
748 replacements["PATH"] = origud.path
749 replacements["BASENAME"] = origud.path.split("/")[-1]
750 replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
751
752 def adduri(ud, uris, uds):
753 for line in mirrors:
754 try:
755 (find, replace) = line
756 except ValueError:
757 continue
758 newuri = uri_replace(ud, find, replace, replacements, ld)
759 if not newuri or newuri in uris or newuri == origud.url:
760 continue
761 try:
762 newud = FetchData(newuri, ld)
763 newud.setup_localpath(ld)
764 except bb.fetch2.BBFetchException as e:
765 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
766 logger.debug(1, str(e))
767 try:
768 ud.method.clean(ud, ld)
769 except UnboundLocalError:
770 pass
771 continue
772 uris.append(newuri)
773 uds.append(newud)
774
775 adduri(newud, uris, uds)
776
777 adduri(origud, uris, uds)
778
779 return uris, uds
780
781def rename_bad_checksum(ud, suffix):
782 """
783 Renames files to have suffix from parameter
784 """
785
786 if ud.localpath is None:
787 return
788
789 new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
790 bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
791 bb.utils.movefile(ud.localpath, new_localpath)
792
793
794def try_mirror_url(origud, ud, ld, check = False):
795 # Return of None or a value means we're finished
796 # False means try another url
797 try:
798 if check:
799 found = ud.method.checkstatus(ud, ld)
800 if found:
801 return found
802 return False
803
804 os.chdir(ld.getVar("DL_DIR", True))
805
806 if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
807 ud.method.download(ud, ld)
808 if hasattr(ud.method,"build_mirror_data"):
809 ud.method.build_mirror_data(ud, ld)
810
811 if not ud.localpath or not os.path.exists(ud.localpath):
812 return False
813
814 if ud.localpath == origud.localpath:
815 return ud.localpath
816
817 # We may be obtaining a mirror tarball which needs further processing by the real fetcher
818 # If that tarball is a local file:// we need to provide a symlink to it
819 dldir = ld.getVar("DL_DIR", True)
820 if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
821 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
822 bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
823 open(ud.donestamp, 'w').close()
824 dest = os.path.join(dldir, os.path.basename(ud.localpath))
825 if not os.path.exists(dest):
826 os.symlink(ud.localpath, dest)
827 if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
828 origud.method.download(origud, ld)
829 if hasattr(origud.method,"build_mirror_data"):
830 origud.method.build_mirror_data(origud, ld)
831 return ud.localpath
832 # Otherwise the result is a local file:// and we symlink to it
833 if not os.path.exists(origud.localpath):
834 if os.path.islink(origud.localpath):
835 # Broken symbolic link
836 os.unlink(origud.localpath)
837
838 os.symlink(ud.localpath, origud.localpath)
839 update_stamp(origud, ld)
840 return ud.localpath
841
842 except bb.fetch2.NetworkAccess:
843 raise
844
845 except bb.fetch2.BBFetchException as e:
846 if isinstance(e, ChecksumError):
847 logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
848 logger.warn(str(e))
849 rename_bad_checksum(ud, e.checksum)
850 elif isinstance(e, NoChecksumError):
851 raise
852 else:
853 logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
854 logger.debug(1, str(e))
855 try:
856 ud.method.clean(ud, ld)
857 except UnboundLocalError:
858 pass
859 return False
860
861def try_mirrors(d, origud, mirrors, check = False):
862 """
863 Try to use a mirrored version of the sources.
864 This method will be automatically called before the fetchers go.
865
866 d is a bb.data instance
867 origud is the original FetchData for the uri we're trying to download
868 mirrors is the list of mirrors we're going to try
869 """
870 ld = d.createCopy()
871
872 uris, uds = build_mirroruris(origud, mirrors, ld)
873
874 for index, uri in enumerate(uris):
875 ret = try_mirror_url(origud, uds[index], ld, check)
876 if ret != False:
877 return ret
878 return None
879
880def srcrev_internal_helper(ud, d, name):
881 """
882 Return:
883 a) a source revision if specified
884 b) latest revision if SRCREV="AUTOINC"
885 c) None if not specified
886 """
887
888 srcrev = None
889 pn = d.getVar("PN", True)
890 attempts = []
891 if name != '' and pn:
892 attempts.append("SRCREV_%s_pn-%s" % (name, pn))
893 if name != '':
894 attempts.append("SRCREV_%s" % name)
895 if pn:
896 attempts.append("SRCREV_pn-%s" % pn)
897 attempts.append("SRCREV")
898
899 for a in attempts:
900 srcrev = d.getVar(a, True)
901 if srcrev and srcrev != "INVALID":
902 break
903
904 if 'rev' in ud.parm and 'tag' in ud.parm:
905 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
906
907 if 'rev' in ud.parm or 'tag' in ud.parm:
908 if 'rev' in ud.parm:
909 parmrev = ud.parm['rev']
910 else:
911 parmrev = ud.parm['tag']
912 if srcrev == "INVALID" or not srcrev:
913 return parmrev
914 if srcrev != parmrev:
915 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
916 return parmrev
917
918 if srcrev == "INVALID" or not srcrev:
919 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
920 if srcrev == "AUTOINC":
921 srcrev = ud.method.latest_revision(ud, d, name)
922
923 return srcrev
924
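# Illustrative note (not part of the original module): for a url carrying
# ;name=rt in a recipe whose PN is "somepackage" (hypothetical values), the
# helper above consults, in order:
#
#     SRCREV_rt_pn-somepackage
#     SRCREV_rt
#     SRCREV_pn-somepackage
#     SRCREV
#
# and uses the first value that is set and not "INVALID".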
925def get_checksum_file_list(d):
926 """ Get a list of files checksum in SRC_URI
927
928 Returns the resolved local paths of all local file entries in
929 SRC_URI as a space-separated string
930 """
931 fetch = Fetch([], d, cache = False, localonly = True)
932
933 dl_dir = d.getVar('DL_DIR', True)
934 filelist = []
935 for u in fetch.urls:
936 ud = fetch.ud[u]
937
938 if ud and isinstance(ud.method, local.Local):
939 paths = ud.method.localpaths(ud, d)
940 for f in paths:
941 pth = ud.decodedurl
942 if '*' in pth:
943 f = os.path.join(os.path.abspath(f), pth)
944 if f.startswith(dl_dir):
945 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
946 if os.path.exists(f):
947 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
948 else:
949 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
950 filelist.append(f + ":" + str(os.path.exists(f)))
951
952 return " ".join(filelist)
953
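# Illustrative note (not part of the original module): the returned string
# pairs each resolved path with an existence flag, e.g. (hypothetical paths):
#
#     "/path/to/files/defconfig:True /path/to/files/missing.patch:False"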
954def get_file_checksums(filelist, pn):
955 """Get a list of the checksums for a list of local files
956
957 Returns the checksums for a list of local files, caching the results as
958 it proceeds
959
960 """
961
962 def checksum_file(f):
963 try:
964 checksum = _checksum_cache.get_checksum(f)
965 except OSError as e:
966 bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
967 return None
968 return checksum
969
970 def checksum_dir(pth):
971 # Handle directories recursively
972 dirchecksums = []
973 for root, dirs, files in os.walk(pth):
974 for name in files:
975 fullpth = os.path.join(root, name)
976 checksum = checksum_file(fullpth)
977 if checksum:
978 dirchecksums.append((fullpth, checksum))
979 return dirchecksums
980
981 checksums = []
982 for pth in filelist.split():
983 exist = pth.split(":")[1]
984 if exist == "False":
985 continue
986 pth = pth.split(":")[0]
987 if '*' in pth:
988 # Handle globs
989 for f in glob.glob(pth):
990 if os.path.isdir(f):
991 checksums.extend(checksum_dir(f))
992 else:
993 checksum = checksum_file(f)
994 checksums.append((f, checksum))
995 elif os.path.isdir(pth):
996 checksums.extend(checksum_dir(pth))
997 else:
998 checksum = checksum_file(pth)
999 checksums.append((pth, checksum))
1000
1001 checksums.sort(key=operator.itemgetter(1))
1002 return checksums
1003
1004
1005class FetchData(object):
1006 """
1007 A class which represents the fetcher state for a given URI.
1008 """
1009 def __init__(self, url, d, localonly = False):
1010 # localpath is the location of a downloaded result. If not set, the file is local.
1011 self.donestamp = None
1012 self.localfile = ""
1013 self.localpath = None
1014 self.lockfile = None
1015 self.mirrortarball = None
1016 self.basename = None
1017 self.basepath = None
1018 (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
1019 self.date = self.getSRCDate(d)
1020 self.url = url
1021 if not self.user and "user" in self.parm:
1022 self.user = self.parm["user"]
1023 if not self.pswd and "pswd" in self.parm:
1024 self.pswd = self.parm["pswd"]
1025 self.setup = False
1026
1027 if "name" in self.parm:
1028 self.md5_name = "%s.md5sum" % self.parm["name"]
1029 self.sha256_name = "%s.sha256sum" % self.parm["name"]
1030 else:
1031 self.md5_name = "md5sum"
1032 self.sha256_name = "sha256sum"
1033 if self.md5_name in self.parm:
1034 self.md5_expected = self.parm[self.md5_name]
1035 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1036 self.md5_expected = None
1037 else:
1038 self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
1039 if self.sha256_name in self.parm:
1040 self.sha256_expected = self.parm[self.sha256_name]
1041 elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
1042 self.sha256_expected = None
1043 else:
1044 self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
1045
1046 self.names = self.parm.get("name",'default').split(',')
1047
1048 self.method = None
1049 for m in methods:
1050 if m.supports(self, d):
1051 self.method = m
1052 break
1053
1054 if not self.method:
1055 raise NoMethodError(url)
1056
1057 if localonly and not isinstance(self.method, local.Local):
1058 raise NonLocalMethod()
1059
1060 if self.parm.get("proto", None) and "protocol" not in self.parm:
1061 logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
1062 self.parm["protocol"] = self.parm.get("proto", None)
1063
1064 if hasattr(self.method, "urldata_init"):
1065 self.method.urldata_init(self, d)
1066
1067 if "localpath" in self.parm:
1068 # if user sets localpath for file, use it instead.
1069 self.localpath = self.parm["localpath"]
1070 self.basename = os.path.basename(self.localpath)
1071 elif self.localfile:
1072 self.localpath = self.method.localpath(self, d)
1073
1074 dldir = d.getVar("DL_DIR", True)
1075 # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
1076 if self.localpath and self.localpath.startswith(dldir):
1077 basepath = self.localpath
1078 elif self.localpath:
1079 basepath = dldir + os.sep + os.path.basename(self.localpath)
1080 else:
1081 basepath = dldir + os.sep + (self.basepath or self.basename)
1082 self.donestamp = basepath + '.done'
1083 self.lockfile = basepath + '.lock'
1084
1085 def setup_revisons(self, d):
1086 self.revisions = {}
1087 for name in self.names:
1088 self.revisions[name] = srcrev_internal_helper(self, d, name)
1089
1090 # add compatibility code for the case where no name is specified
1091 if len(self.names) == 1:
1092 self.revision = self.revisions[self.names[0]]
1093
1094 def setup_localpath(self, d):
1095 if not self.localpath:
1096 self.localpath = self.method.localpath(self, d)
1097
1098 def getSRCDate(self, d):
1099 """
1100 Return the SRC Date for the component
1101
1102 d the bb.data instance
1103 """
1104 if "srcdate" in self.parm:
1105 return self.parm['srcdate']
1106
1107 pn = d.getVar("PN", True)
1108
1109 if pn:
1110 return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1111
1112 return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1113
1114class FetchMethod(object):
1115 """Base class for 'fetch'ing data"""
1116
1117 def __init__(self, urls = None):
1118 self.urls = urls or []
1119
1120 def supports(self, urldata, d):
1121 """
1122 Check to see if this fetch class supports a given url.
1123 """
1124 return 0
1125
1126 def localpath(self, urldata, d):
1127 """
1128 Return the local filename of a given url assuming a successful fetch.
1129 Can also set up variables in urldata for use in download() (saving code
1130 duplication and duplicate code execution)
1131 """
1132 return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
1133
1134 def supports_checksum(self, urldata):
1135 """
1136 Is localpath something that can be represented by a checksum?
1137 """
1138
1139 # We cannot compute checksums for directories
1140 if os.path.isdir(urldata.localpath):
1141 return False
1142 if "*" in urldata.localpath:
1143 return False
1144
1145 return True
1146
1147 def recommends_checksum(self, urldata):
1148 """
1149 Is this backend one for which checksumming is recommended (should
1150 warnings be displayed if there is no checksum)?
1151 """
1152 return False
1153
1154 def _strip_leading_slashes(self, relpath):
1155 """
1156 Remove leading slash as os.path.join can't cope
1157 """
1158 while os.path.isabs(relpath):
1159 relpath = relpath[1:]
1160 return relpath
1161
1162 def setUrls(self, urls):
1163 self.__urls = urls
1164
1165 def getUrls(self):
1166 return self.__urls
1167
1168 urls = property(getUrls, setUrls, None, "Urls property")
1169
1170 def need_update(self, ud, d):
1171 """
1172 Force a fetch, even if localpath exists?
1173 """
1174 if os.path.exists(ud.localpath):
1175 return False
1176 return True
1177
1178 def supports_srcrev(self):
1179 """
1180 The fetcher supports auto source revisions (SRCREV)
1181 """
1182 return False
1183
1184 def download(self, urldata, d):
1185 """
1186 Fetch urls
1187 Assumes localpath was called first
1188 """
1189 raise NoMethodError(urldata.url)
1190
1191 def unpack(self, urldata, rootdir, data):
1192 iterate = False
1193 file = urldata.localpath
1194
1195 try:
1196 unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
1197 except ValueError as exc:
1198 bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
1199 (file, urldata.parm.get('unpack')))
1200
1201 dots = file.split(".")
1202 if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
1203 efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
1204 else:
1205 efile = file
1206 cmd = None
1207
1208 if unpack:
1209 if file.endswith('.tar'):
1210 cmd = 'tar x --no-same-owner -f %s' % file
1211 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1212 cmd = 'tar xz --no-same-owner -f %s' % file
1213 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1214 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
1215 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1216 cmd = 'gzip -dc %s > %s' % (file, efile)
1217 elif file.endswith('.bz2'):
1218 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1219 elif file.endswith('.tar.xz'):
1220 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
1221 elif file.endswith('.xz'):
1222 cmd = 'xz -dc %s > %s' % (file, efile)
1223 elif file.endswith('.zip') or file.endswith('.jar'):
1224 try:
1225 dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
1226 except ValueError as exc:
1227 bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
1228 (file, urldata.parm.get('dos')))
1229 cmd = 'unzip -q -o'
1230 if dos:
1231 cmd = '%s -a' % cmd
1232 cmd = "%s '%s'" % (cmd, file)
1233 elif file.endswith('.rpm') or file.endswith('.srpm'):
1234 if 'extract' in urldata.parm:
1235 unpack_file = urldata.parm.get('extract')
1236 cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
1237 iterate = True
1238 iterate_file = unpack_file
1239 else:
1240 cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
1241 elif file.endswith('.deb') or file.endswith('.ipk'):
1242 cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
1243
1244 if not unpack or not cmd:
1245 # If file == dest, then avoid any copies, as we already put the file into dest!
1246 dest = os.path.join(rootdir, os.path.basename(file))
1247 if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
1248 if os.path.isdir(file):
1249 # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
1250 basepath = getattr(urldata, "basepath", None)
1251 destdir = "."
1252 if basepath and basepath.endswith("/"):
1253 basepath = basepath.rstrip("/")
1254 elif basepath:
1255 basepath = os.path.dirname(basepath)
1256 if basepath and basepath.find("/") != -1:
1257 destdir = basepath[:basepath.rfind('/')]
1258 destdir = destdir.strip('/')
1259 if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
1260 os.makedirs("%s/%s" % (rootdir, destdir))
1261 cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir)
1262 #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
1263 else:
1264 # The "destdir" handling was specifically done for FILESPATH
1265 # items. So, only do so for file:// entries.
1266 if urldata.type == "file" and urldata.path.find("/") != -1:
1267 destdir = urldata.path.rsplit("/", 1)[0]
1268 if urldata.parm.get('subdir') is not None:
1269 destdir = urldata.parm.get('subdir') + "/" + destdir
1270 else:
1271 if urldata.parm.get('subdir') is not None:
1272 destdir = urldata.parm.get('subdir')
1273 else:
1274 destdir = "."
1275 bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
1276 cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)
1277
1278 if not cmd:
1279 return
1280
1281 # Change to subdir before executing command
1282 save_cwd = os.getcwd()
1283 os.chdir(rootdir)
1284 if 'subdir' in urldata.parm:
1285 newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
1286 bb.utils.mkdirhier(newdir)
1287 os.chdir(newdir)
1288
1289 path = data.getVar('PATH', True)
1290 if path:
1291 cmd = "PATH=\"%s\" %s" % (path, cmd)
1292 bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
1293 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
1294
1295 os.chdir(save_cwd)
1296
1297 if ret != 0:
1298 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
1299
1300 if iterate is True:
1301 iterate_urldata = urldata
1302 iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
1303 self.unpack(urldata, rootdir, data)
1304
1305 return
1306
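# Illustrative note (not part of the original module): the unpack behaviour
# above is steered by SRC_URI parameters, e.g. (hypothetical URL):
#
#     SRC_URI = "http://www.example.com/foo.zip;unpack=0;subdir=bar"
#
# unpack=0 copies the file instead of extracting it, subdir= unpacks or
# copies into a subdirectory, dos=1 passes -a to unzip for text conversion,
# and extract=<file> pulls a single file out of an rpm/srpm.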
1307 def clean(self, urldata, d):
1308 """
1309 Clean any existing full or partial download
1310 """
1311 bb.utils.remove(urldata.localpath)
1312
1313 def try_premirror(self, urldata, d):
1314 """
1315 Should premirrors be used?
1316 """
1317 return True
1318
1319 def checkstatus(self, urldata, d):
1320 """
1321 Check the status of a URL
1322 Assumes localpath was called first
1323 """
1324 logger.info("URL %s could not be checked for status since no method exists.", url)
1325 return True
1326
1327 def latest_revision(self, ud, d, name):
1328 """
1329 Look in the cache for the latest revision, if not present ask the SCM.
1330 """
1331 if not hasattr(self, "_latest_revision"):
1332 raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
1333
1334 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1335 key = self.generate_revision_key(ud, d, name)
1336 try:
1337 return revs[key]
1338 except KeyError:
1339 revs[key] = rev = self._latest_revision(ud, d, name)
1340 return rev
1341
1342 def sortable_revision(self, ud, d, name):
1343 latest_rev = self._build_revision(ud, d, name)
1344 return True, str(latest_rev)
1345
1346 def generate_revision_key(self, ud, d, name):
1347 key = self._revision_key(ud, d, name)
1348 return "%s-%s" % (key, d.getVar("PN", True) or "")
1349
1350class Fetch(object):
1351 def __init__(self, urls, d, cache = True, localonly = False):
1352 if localonly and cache:
1353 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1354
1355 if len(urls) == 0:
1356 urls = d.getVar("SRC_URI", True).split()
1357 self.urls = urls
1358 self.d = d
1359 self.ud = {}
1360
1361 fn = d.getVar('FILE', True)
1362 if cache and fn and fn in urldata_cache:
1363 self.ud = urldata_cache[fn]
1364
1365 for url in urls:
1366 if url not in self.ud:
1367 try:
1368 self.ud[url] = FetchData(url, d, localonly)
1369 except NonLocalMethod:
1370 if localonly:
1371 self.ud[url] = None
1372 pass
1373
1374 if fn and cache:
1375 urldata_cache[fn] = self.ud
1376
1377 def localpath(self, url):
1378 if url not in self.urls:
1379 self.ud[url] = FetchData(url, self.d)
1380
1381 self.ud[url].setup_localpath(self.d)
1382 return self.d.expand(self.ud[url].localpath)
1383
1384 def localpaths(self):
1385 """
1386 Return a list of the local filenames, assuming successful fetch
1387 """
1388 local = []
1389
1390 for u in self.urls:
1391 ud = self.ud[u]
1392 ud.setup_localpath(self.d)
1393 local.append(ud.localpath)
1394
1395 return local
1396
1397 def download(self, urls = []):
1398 """
1399 Fetch all urls
1400 """
1401 if len(urls) == 0:
1402 urls = self.urls
1403
1404 network = self.d.getVar("BB_NO_NETWORK", True)
1405 premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
1406
1407 for u in urls:
1408 ud = self.ud[u]
1409 ud.setup_localpath(self.d)
1410 m = ud.method
1411 localpath = ""
1412
1413 lf = bb.utils.lockfile(ud.lockfile)
1414
1415 try:
1416 self.d.setVar("BB_NO_NETWORK", network)
1417
1418 if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
1419 localpath = ud.localpath
1420 elif m.try_premirror(ud, self.d):
1421 logger.debug(1, "Trying PREMIRRORS")
1422 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1423 localpath = try_mirrors(self.d, ud, mirrors, False)
1424
1425 if premirroronly:
1426 self.d.setVar("BB_NO_NETWORK", "1")
1427
1428 os.chdir(self.d.getVar("DL_DIR", True))
1429
1430 firsterr = None
1431 if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
1432 try:
1433 logger.debug(1, "Trying Upstream")
1434 m.download(ud, self.d)
1435 if hasattr(m, "build_mirror_data"):
1436 m.build_mirror_data(ud, self.d)
1437 localpath = ud.localpath
1438 # Verify the checksum early so that if it mismatches, the
1439 # fetcher still has a chance to fetch from a mirror
1440 update_stamp(ud, self.d)
1441
1442 except bb.fetch2.NetworkAccess:
1443 raise
1444
1445 except BBFetchException as e:
1446 if isinstance(e, ChecksumError):
1447 logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
1448 logger.debug(1, str(e))
1449 rename_bad_checksum(ud, e.checksum)
1450 elif isinstance(e, NoChecksumError):
1451 raise
1452 else:
1453 logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
1454 logger.debug(1, str(e))
1455 firsterr = e
1456 # Remove any incomplete fetch
1457 m.clean(ud, self.d)
1458 logger.debug(1, "Trying MIRRORS")
1459 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1460 localpath = try_mirrors(self.d, ud, mirrors)
1461
1462 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
1463 if firsterr:
1464 logger.error(str(firsterr))
1465 raise FetchError("Unable to fetch URL from any source.", u)
1466
1467 update_stamp(ud, self.d)
1468
1469 except BBFetchException as e:
1470 if isinstance(e, ChecksumError):
1471 logger.error("Checksum failure fetching %s" % u)
1472 raise
1473
1474 finally:
1475 bb.utils.unlockfile(lf)
1476
1477 def checkstatus(self, urls = []):
1478 """
1479 Check all urls exist upstream
1480 """
1481
1482 if len(urls) == 0:
1483 urls = self.urls
1484
1485 for u in urls:
1486 ud = self.ud[u]
1487 ud.setup_localpath(self.d)
1488 m = ud.method
1489 logger.debug(1, "Testing URL %s", u)
1490 # First try checking uri, u, from PREMIRRORS
1491 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
1492 ret = try_mirrors(self.d, ud, mirrors, True)
1493 if not ret:
1494 # Next try checking from the original uri, u
1495 try:
1496 ret = m.checkstatus(ud, self.d)
1497 except:
1498 # Finally, try checking uri, u, from MIRRORS
1499 mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
1500 ret = try_mirrors(self.d, ud, mirrors, True)
1501
1502 if not ret:
1503 raise FetchError("URL %s doesn't work" % u, u)
1504
1505 def unpack(self, root, urls = []):
1506 """
1507 Unpack all urls into the root directory
1508 """
1509
1510 if len(urls) == 0:
1511 urls = self.urls
1512
1513 for u in urls:
1514 ud = self.ud[u]
1515 ud.setup_localpath(self.d)
1516
1517 if ud.localpath is None:
1518 continue
1519
1520 if ud.lockfile:
1521 lf = bb.utils.lockfile(ud.lockfile)
1522
1523 ud.method.unpack(ud, root, self.d)
1524
1525 if ud.lockfile:
1526 bb.utils.unlockfile(lf)
1527
1528 def clean(self, urls = []):
1529 """
1530 Clean files that the fetcher gets or places
1531 """
1532
1533 if len(urls) == 0:
1534 urls = self.urls
1535
1536 for url in urls:
1537 if url not in self.ud:
1538 self.ud[url] = FetchData(url, self.d)
1539 ud = self.ud[url]
1540 ud.setup_localpath(self.d)
1541
1542 if not ud.localfile and ud.localpath is None:
1543 continue
1544
1545 if ud.lockfile:
1546 lf = bb.utils.lockfile(ud.lockfile)
1547
1548 ud.method.clean(ud, self.d)
1549 if ud.donestamp:
1550 bb.utils.remove(ud.donestamp)
1551
1552 if ud.lockfile:
1553 bb.utils.unlockfile(lf)
1554
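# A minimal usage sketch (illustrative, not part of the original module) of the
# Fetch class; the URL is hypothetical, d is a datastore and workdir a
# destination directory:
#
#     fetcher = bb.fetch2.Fetch(["http://www.example.com/foo.tar.gz"], d)
#     fetcher.download()
#     path = fetcher.localpath("http://www.example.com/foo.tar.gz")
#     fetcher.unpack(workdir)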
1555from . import cvs
1556from . import git
1557from . import gitsm
1558from . import gitannex
1559from . import local
1560from . import svn
1561from . import wget
1562from . import ssh
1563from . import sftp
1564from . import perforce
1565from . import bzr
1566from . import hg
1567from . import osc
1568from . import repo
1569from . import clearcase
1570
1571methods.append(local.Local())
1572methods.append(wget.Wget())
1573methods.append(svn.Svn())
1574methods.append(git.Git())
1575methods.append(gitsm.GitSM())
1576methods.append(gitannex.GitANNEX())
1577methods.append(cvs.Cvs())
1578methods.append(ssh.SSH())
1579methods.append(sftp.SFTP())
1580methods.append(perforce.Perforce())
1581methods.append(bzr.Bzr())
1582methods.append(hg.Hg())
1583methods.append(osc.Osc())
1584methods.append(repo.Repo())
1585methods.append(clearcase.ClearCase())