summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/fetch2
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py1585
-rw-r--r--bitbake/lib/bb/fetch2/bzr.py143
-rw-r--r--bitbake/lib/bb/fetch2/clearcase.py263
-rw-r--r--bitbake/lib/bb/fetch2/cvs.py171
-rw-r--r--bitbake/lib/bb/fetch2/git.py358
-rw-r--r--bitbake/lib/bb/fetch2/gitannex.py76
-rw-r--r--bitbake/lib/bb/fetch2/gitsm.py136
-rw-r--r--bitbake/lib/bb/fetch2/hg.py193
-rw-r--r--bitbake/lib/bb/fetch2/local.py128
-rw-r--r--bitbake/lib/bb/fetch2/osc.py135
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py187
-rw-r--r--bitbake/lib/bb/fetch2/repo.py98
-rw-r--r--bitbake/lib/bb/fetch2/sftp.py129
-rw-r--r--bitbake/lib/bb/fetch2/ssh.py127
-rw-r--r--bitbake/lib/bb/fetch2/svn.py192
-rw-r--r--bitbake/lib/bb/fetch2/wget.py106
16 files changed, 4027 insertions, 0 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..378d41e1cb
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1585 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11# Copyright (C) 2012 Intel Corporation
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from __future__ import absolute_import
29from __future__ import print_function
30import os, re
31import signal
32import glob
33import logging
34import urllib
35import urlparse
36import operator
37import bb.persist_data, bb.utils
38import bb.checksum
39from bb import data
40import bb.process
41import subprocess
42
# Fetcher API version; bumped when the fetcher interface changes.
__version__ = "2"
# Process-wide cache of local-file checksums, shared by all fetch operations.
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")
47
class BBFetchException(Exception):
    """Root of the fetcher exception hierarchy.

    Stores the message on the instance so __str__ can return it verbatim.
    """
    def __init__(self, message):
        self.msg = message
        Exception.__init__(self, message)

    def __str__(self):
        return self.msg
56
class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        # Fall back to a generic description when no message was supplied.
        default = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, message if message else default)
        self.args = (url,)
67
class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
    def __init__(self, message, url = None):
        # Include the URL in the message whenever one is known.
        if url:
            full_msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            full_msg = "Fetcher failure: %s" % message
        self.url = url
        BBFetchException.__init__(self, full_msg)
        self.args = (message, url)
78
class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        # Keep the offending checksum so callers (e.g. try_mirror_url via
        # rename_bad_checksum) can rename the corrupt download with it.
        self.checksum = checksum
        FetchError.__init__(self, message, url)
84
class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
    # Raised by verify_checksum(); carries no state beyond FetchError.
87
class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(self, "Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
95
class NoMethodError(BBFetchException):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""
    def __init__(self, url):
        self.url = url
        BBFetchException.__init__(self, "Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
103
class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
    def __init__(self, missing, url):
        self.url = url
        self.missing = missing
        BBFetchException.__init__(self, "URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
112
class ParameterError(BBFetchException):
    """Exception raised when a url cannot be processed due to invalid parameters."""
    def __init__(self, message, url):
        self.url = url
        BBFetchException.__init__(self, "URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
120
class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(
            self,
            "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url))
        self.args = (url, cmd)
129
class NonLocalMethod(Exception):
    """Internal control-flow exception; carries no message or state."""
    def __init__(self):
        Exception.__init__(self)
133
134
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not conform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way conforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    # Schemes that may appear in the relative "scheme:path" form.
    _relative_schemes = ['file', 'git']
    # Schemes whose URIs must never carry a network location.
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        # Initialise every component to its "empty" value; the parsing
        # below only fills in what the supplied URI actually provides.
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urlparse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        if not self.scheme in urlparse.uses_netloc:
            urlparse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urlparse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile("^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        # NOTE: the path setter below also recomputes self.relative.
        self.path = urllib.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        # ";k=v;k2=v2" suffix, or '' when there are no params.
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        # "?k=v&k2=v2" suffix, or '' when there is no query.
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # Split "k=v<elmdelim>k2=v2" into a dict; values may contain kvdelim.
        ret = {}
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        # Inverse of _param_str_split().
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # An absolute path makes the URI non-relative; anything else
        # (including '') marks it relative.
        if re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        # Preserve any existing password when the username is replaced.
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
341
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    Returns a (type, host, path, user, pswd, params-dict) tuple.
    Raises MalformedUrl if the URL cannot be parsed or if a URL
    parameter lacks an '=value' part.
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>[^/]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # file:// URLs carry no host; everything after the scheme is the path.
    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    # Default pswd up-front: previously it could be left unbound (NameError)
    # when 'user' was set but the user/password regexp failed to match.
    pswd = ''
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            if s:
                if not '=' in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                # Split only on the first '=': parameter values may themselves
                # contain '=' (the old s.split('=') raised ValueError then).
                s1, s2 = s.split('=', 1)
                p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
384
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters) as produced by decodeurl().
    """

    type, host, path, user, pswd, p = decoded

    # path and type are mandatory; everything else is optional.
    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
    if not type:
        raise MissingParameterError('type', "encoded from the data %s" % str(decoded))

    url = '%s://' % type
    if user and type != "file":
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host and type != "file":
        url += "%s" % host
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    url += "%s" % urllib.quote(path)
    # Append ';key=value' for every URL parameter.
    for parm in p or {}:
        url += ";%s=%s" % (parm, p[parm])

    return url
413
def uri_replace(ud, uri_find, uri_replace, replacements, d):
    """Rewrite ud.url according to one mirror (uri_find, uri_replace) pair.

    Each decoded component of uri_find is used as a regexp against the
    matching component of ud.url; on success the corresponding component of
    uri_replace (with placeholder keys from 'replacements' substituted) is
    used in the result.  Returns the rewritten URL string, or None when the
    pattern does not apply (or produces the original URL unchanged).
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    # Components: [type, host, path, user, pswd, params-dict]
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                # Expand placeholders (TYPE/HOST/PATH/...) then substitute.
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            # Component did not match the find-pattern: mirror doesn't apply.
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
469
# Registered fetch method implementations (each fetcher module appends here).
methods = []
# Cache of per-URL fetch data so repeated lookups avoid re-parsing.
urldata_cache = {}
# Head revisions captured before the SRCREV cache is cleared; consumed by
# fetcher_compare_revisions().
saved_headrevs = {}
473
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            bb.fetch2.saved_headrevs = revs.items()
        except Exception:
            # Saving the old head revisions is best-effort; an unreadable
            # cache must not abort initialization.  (Was a bare 'except:',
            # which also swallowed KeyboardInterrupt/SystemExit.)
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialise itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
499
def fetcher_parse_save(d):
    # Delegates to the shared checksum cache; counterpart of fetcher_parse_done().
    _checksum_cache.save_extras(d)
502
def fetcher_parse_done(d):
    # Final merge of the shared checksum cache once parsing has finished.
    _checksum_cache.save_merge(d)
505
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistant cache with current values and
    return true/false on whether they've changed.
    """
    # Renamed from 'data'/'data2' to avoid shadowing the module-level
    # 'data' import (from bb import data).
    headrevs = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    previous = bb.fetch2.saved_headrevs

    changed = False
    for key in headrevs:
        if key not in previous or previous[key] != headrevs[key]:
            logger.debug(1, "%s changed", key)
            changed = True
        else:
            logger.debug(2, "%s did not change", key)
    # Accumulate over all keys instead of returning on the first change, so
    # every changed/unchanged key gets logged; the previous early
    # 'return True' left the 'changed' flag dead and skipped the remaining
    # per-key logging.  The boolean result is unchanged.
    return changed
524
def mirror_from_string(data):
    """Parse a mirror specification string into a list of token lists.

    Literal '\\n' sequences are treated as newlines; each non-empty line is
    whitespace-split (typically into a [find, replace] pair).
    """
    lines = (data or "").replace('\\n', '\n').split('\n')
    return [line.split() for line in lines if line]
527
def verify_checksum(ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    """

    if not ud.method.supports_checksum(ud):
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
        if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        # Log missing sums so user can more easily add them
        if not ud.md5_expected:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)

        if not ud.sha256_expected:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)

    md5mismatch = False
    sha256mismatch = False

    if ud.md5_expected != md5data:
        md5mismatch = True

    if ud.sha256_expected != sha256data:
        sha256mismatch = True

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if md5mismatch and ud.md5_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True;

    if sha256mismatch and ud.sha256_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True;

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        # NOTE(review): the ChecksumError always carries md5data, even when
        # only the sha256 sum mismatched.
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
591
592
def update_stamp(ud, d):
    """
    donestamp is file stamp indicating the whole fetching is done
    this function update the stamp after verifying the checksum
    """
    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except (IOError, OSError):
            # Errors aren't fatal here: the stamp already exists and failing
            # to refresh its mtime must not break the fetch.  (Was a bare
            # 'except:', which also swallowed KeyboardInterrupt.)
            pass
    else:
        # First completion: only stamp after the checksums verify.
        verify_checksum(ud, d)
        open(ud.donestamp, 'w').close()
608
def subprocess_setup():
    """Restore default SIGPIPE handling in child processes.

    Python installs its own SIGPIPE handler, which is usually not what
    non-Python subprocesses expect; gzip/bash are known to misbehave
    with it in place.
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
614
def get_autorev(d):
    """Return the magic "AUTOINC" revision marker.

    Unless the SRCREV policy is "cache", also flag the recipe so its parse
    result is not cached (the resolved revision may change between runs).
    """
    policy = d.getVar('BB_SRCREV_POLICY', True)
    if policy != "cache":
        d.setVar('__BB_DONT_CACHE', '1')
    return "AUTOINC"
620
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    # Collect the SRC_URI entries whose fetch method can provide a revision.
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    # Simple case: one SCM, one revision name.
    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
        # Revisions are truncated to 10 characters for use in the version.
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT', True)
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")

    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = ud.method.sortable_revision(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            # Substitute each revision name placeholder in SRCREV_FORMAT.
            format = format.replace(name, rev)
    if seenautoinc:
        format = "AUTOINC+" + format

    return format
668
def localpath(url, d):
    """Convenience wrapper: return the local download path for a single URL."""
    return bb.fetch2.Fetch([url], d).localpath(url)
672
def runfetchcmd(cmd, d, quiet = False, cleanup = None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """

    # Fix: 'cleanup' previously used a mutable default argument ([]), which
    # is a single list shared between every call; use a None sentinel.
    if cleanup is None:
        cleanup = []

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD']

    for var in exportvars:
        val = d.getVar(var, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        # Preserve whatever output the failed command produced for the error.
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Best-effort removal of partial downloads before raising.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
731
def check_network_access(d, info = "", url = None):
    """
    log remote network access, and error if BB_NO_NETWORK is set
    """
    if d.getVar("BB_NO_NETWORK", True) != "1":
        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
    else:
        raise NetworkAccess(url, info)
740
def build_mirroruris(origud, mirrors, ld):
    """Expand the mirror (find, replace) pairs against origud's URL.

    Returns two parallel lists: candidate mirror URL strings and their
    corresponding FetchData objects.  Each generated mirror URL is fed back
    through the mirror list recursively, so mirrors of mirrors are found too.
    """
    uris = []
    uds = []

    # Placeholder values available to mirror replacement expressions.
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                # Malformed mirror entry (not exactly two fields); skip it.
                continue
            newuri = uri_replace(ud, find, replace, replacements, ld)
            # Skip non-matches, duplicates, and the original URL itself.
            if not newuri or newuri in uris or newuri == origud.url:
                continue
            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
            except bb.fetch2.BBFetchException as e:
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            # Recurse so that mirrors of this mirror are also considered.
            adduri(newud, uris, uds)

    adduri(origud, uris, uds)

    return uris, uds
780
def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter
    """

    if ud.localpath is None:
        return

    bad_path = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, bad_path))
    bb.utils.movefile(ud.localpath, bad_path)
792
793
def try_mirror_url(origud, ud, ld, check = False):
    """Attempt one mirror candidate for origud.

    Return protocol:
      - None or a (truthy) value means we're finished,
      - False means the caller should try another url.
    """
    try:
        if check:
            found = ud.method.checkstatus(ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            # Re-run the original fetcher so it processes the mirror tarball.
            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method,"build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return ud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        if not os.path.exists(origud.localpath):
            if os.path.islink(origud.localpath):
                # Broken symbolic link
                os.unlink(origud.localpath)

            os.symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        # Never mask a network-policy violation.
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            # Corrupt mirror download: keep the evidence (renamed file) and
            # let the caller try the next mirror.
            logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warn(str(e))
            rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
            try:
                ud.method.clean(ud, ld)
            except UnboundLocalError:
                pass
        return False
860
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    # Try each candidate in order; anything other than False ends the search.
    for uri, mirror_ud in zip(uris, uds):
        ret = try_mirror_url(origud, mirror_ud, ld, check)
        if ret != False:
            return ret
    return None
879
def srcrev_internal_helper(ud, d, name):
    """
    Return:
    a) a source revision if specified
    b) latest revision if SRCREV="AUTOINC"
    c) None if not specified
    """

    srcrev = None
    pn = d.getVar("PN", True)
    # Variable names tried in decreasing order of specificity.
    attempts = []
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a, True)
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    # A ;rev= or ;tag= URL parameter wins unless it conflicts with SRCREV.
    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
    if srcrev == "AUTOINC":
        # Resolve the magic AUTOINC value to the SCM's latest revision.
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
924
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string.  Each entry is suffixed with
    ":True"/":False" indicating whether the file exists.
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR', True)
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        # Only file:// entries contribute to the checksum list.
        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    # Keep the glob pattern attached to the resolved directory.
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
953
def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds

    'filelist' is the space-separated "path:exists" string produced by
    get_checksum_file_list(); 'pn' is only used in warning messages.
    """

    def checksum_file(f):
        # Single-file checksum via the shared cache; None on I/O failure.
        try:
            checksum = _checksum_cache.get_checksum(f)
        except OSError as e:
            bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
            return None
        return checksum

    def checksum_dir(pth):
        # Handle directories recursively
        dirchecksums = []
        for root, dirs, files in os.walk(pth):
            for name in files:
                fullpth = os.path.join(root, name)
                checksum = checksum_file(fullpth)
                if checksum:
                    dirchecksums.append((fullpth, checksum))
        return dirchecksums

    checksums = []
    for pth in filelist.split():
        # Entries are "path:True|False"; skip files known not to exist.
        exist = pth.split(":")[1]
        if exist == "False":
            continue
        pth = pth.split(":")[0]
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
                if os.path.isdir(f):
                    checksums.extend(checksum_dir(f))
                else:
                    checksum = checksum_file(f)
                    checksums.append((f, checksum))
        elif os.path.isdir(pth):
            checksums.extend(checksum_dir(pth))
        else:
            checksum = checksum_file(pth)
            checksums.append((pth, checksum))

    # Stable ordering by checksum value so the result is reproducible.
    checksums.sort(key=operator.itemgetter(1))
    return checksums
1003
1004
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the URL, selects the first registered fetch method whose
    supports() accepts it, lets that method initialise per-URL state, and
    derives the .done stamp and .lock file paths under DL_DIR.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarball = None
        self.basename = None
        self.basepath = None
        # Split the (expanded) URL into its scheme/host/path/credential/parameter parts.
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = self.getSRCDate(d)
        self.url = url
        # URL parameters may carry credentials when they are not embedded in the URL itself.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # A "name" parameter namespaces the checksum varflags (e.g. foo.md5sum).
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksums come from the URL parameters first, then (for
        # remote schemes only) from SRC_URI varflags.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

        self.names = self.parm.get("name",'default').split(',')

        # First registered method claiming support wins (see the
        # methods.append(...) registration order at module level).
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # "proto" is a deprecated alias for "protocol".
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR", True)
        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        else:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisons(self, d):
        # (sic) historical spelling; fetchers call ud.setup_revisons() by this
        # name, so it must not be renamed.
        # Resolve the SRCREV for each configured name via srcrev_internal_helper.
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        # Lazily ask the fetch method for the download location if the
        # constructor did not already determine it.
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        # A per-URL "srcdate" parameter overrides the datastore variables.
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN", True)

        if pn:
            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)

        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
1113
class FetchMethod(object):
    """Base class for 'fetch'ing data.

    Concrete fetchers (git, wget, svn, ...) subclass this and override
    supports(), download() and friends. Instances are registered in the
    module-level 'methods' list.
    """

    def __init__(self, urls=None):
        # The urls argument has never been stored (the property below starts
        # empty); it is kept for backwards compatibility, with the mutable
        # default ([]) replaced by None.
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """

        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        # Nor for glob patterns that may expand to several files.
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        # Fixed: previously raised NoMethodError(url) with an undefined name,
        # which turned the intended error into a NameError.
        raise NoMethodError(urldata.url)

    def unpack(self, urldata, rootdir, data):
        """Unpack urldata.localpath into rootdir.

        Honours the 'unpack', 'dos', 'extract' and 'subdir' URL parameters.
        Raises UnpackError when the unpack command fails. Note: the 'data'
        parameter is the datastore (it shadows the module-level bb.data).
        """
        iterate = False
        file = urldata.localpath

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        # For single-file decompressors the output name is the input minus
        # its final extension, placed in rootdir.
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None

        if unpack:
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    # -a converts DOS line endings in text files.
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    # Extract a single file, then recurse to unpack it below.
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(rootdir, os.path.basename(file))
            if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                if os.path.isdir(file):
                    # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
                    basepath = getattr(urldata, "basepath", None)
                    destdir = "."
                    if basepath and basepath.endswith("/"):
                        basepath = basepath.rstrip("/")
                    elif basepath:
                        basepath = os.path.dirname(basepath)
                    if basepath and basepath.find("/") != -1:
                        destdir = basepath[:basepath.rfind('/')]
                        destdir = destdir.strip('/')
                    if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
                        os.makedirs("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir)
                else:
                    # The "destdir" handling was specifically done for FILESPATH
                    # items. So, only do so for file:// entries.
                    if urldata.type == "file" and urldata.path.find("/") != -1:
                        destdir = urldata.path.rsplit("/", 1)[0]
                        if urldata.parm.get('subdir') != None:
                            destdir = urldata.parm.get('subdir') + "/" + destdir
                    else:
                        if urldata.parm.get('subdir') != None:
                            destdir = urldata.parm.get('subdir')
                        else:
                            destdir = "."
                    bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                    cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)

        if not cmd:
            return

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        os.chdir(rootdir)
        if 'subdir' in urldata.parm:
            newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
            bb.utils.mkdirhier(newdir)
            os.chdir(newdir)

        path = data.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            # Recurse once to unpack the file extracted from the rpm/srpm.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        # Fixed: the log call referenced an undefined name 'url'.
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            # Fixed: the error previously referenced an undefined name 'url'.
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        """Return (resolved, revision-string) for sorting/comparison."""
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        """Build the persistent-cache key for this URL's head revision."""
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN", True) or "")
class Fetch(object):
    """Front-end tying a list of SRC_URI entries to their FetchData objects.

    Provides download/checkstatus/unpack/clean over all (or a subset of)
    the URLs, with per-URL locking and PREMIRRORS/MIRRORS fallback.
    """

    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI", True).split()
        self.urls = urls
        self.d = d
        self.ud = {}

        # Reuse cached FetchData for this recipe file where possible.
        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
            self.ud = urldata_cache[fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    # In localonly mode, non-local URLs are recorded as None
                    # so callers can skip them.
                    if localonly:
                        self.ud[url] = None

        if fn and cache:
            urldata_cache[fn] = self.ud

    def localpath(self, url):
        """Return the expanded local path for a single url."""
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls=None):
        """
        Fetch all urls
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            lf = bb.utils.lockfile(ud.lockfile)

            try:
                self.d.setVar("BB_NO_NETWORK", network)

                # Already downloaded and up to date? Otherwise try premirrors.
                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # early checksum verify, so that if checksum mismatched,
                        # fetcher still have chance to fetch from mirror
                        update_stamp(ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self.d, ud, mirrors)

                    if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                        if firsterr:
                            logger.error(str(firsterr))
                        raise FetchError("Unable to fetch URL from any source.", u)

                    update_stamp(ud, self.d)

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                bb.utils.unlockfile(lf)

    def checkstatus(self, urls=None):
        """
        Check all urls exist upstream
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(ud, self.d)
                except:
                    # Deliberately broad: any upstream failure falls back to
                    # checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls=None):
        """
        Unpack all urls into the root directory
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            # Fixed: previously tested self.d.expand(self.localpath), which
            # expanded the bound localpath *method*, not this URL's path.
            if ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """
        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                # Fixed: previously passed the undefined name 'd' here.
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)
1554
1555from . import cvs
1556from . import git
1557from . import gitsm
1558from . import gitannex
1559from . import local
1560from . import svn
1561from . import wget
1562from . import ssh
1563from . import sftp
1564from . import perforce
1565from . import bzr
1566from . import hg
1567from . import osc
1568from . import repo
1569from . import clearcase
1570
# Register one instance of each fetch backend. Order is significant: for a
# given URL, FetchData picks the first method whose supports() returns true.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 0000000000..03e9ac461b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,143 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import logging
29import bb
30from bb import data
31from bb.fetch2 import FetchMethod
32from bb.fetch2 import FetchError
33from bb.fetch2 import runfetchcmd
34from bb.fetch2 import logger
35
class Bzr(FetchMethod):
    """Fetch method for Bazaar (bzr://) repositories."""

    def supports(self, ud, d):
        """Accept bzr URLs only."""
        return ud.type in ['bzr']

    def urldata_init(self, ud, d):
        """
        init bzr specific variable within url data
        """
        # Create paths to bzr checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        ud.setup_revisons(d)

        if not ud.revision:
            # Fixed: latest_revision() takes (ud, d, name); it was previously
            # called without the name argument, raising a TypeError whenever
            # no revision was configured.
            ud.revision = self.latest_revision(ud, d, ud.names[0])

        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildbzrcommand(self, ud, d, command):
        """
        Build up an bzr commandline based on ud
        command is "fetch", "update", "revno"
        """

        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = ud.parm.get('protocol', 'http')

        bzrroot = ud.host + ud.path

        options = []

        if command == "revno":
            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        else:
            if ud.revision:
                options.append("-r %s" % ud.revision)

            if command == "fetch":
                bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            elif command == "update":
                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
            else:
                raise FetchError("Invalid bzr command %s" % command, ud.url)

        return bzrcmd

    def download(self, ud, d):
        """Fetch url: update an existing checkout or branch a fresh one,
        then tar the result up as ud.localpath."""

        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", ud.url)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            # Remove any stale partial checkout before branching anew.
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", ud.url)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        # scmdata=keep retains the .bzr metadata inside the tarball.
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)

        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

        output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

        return output.strip()

    def sortable_revision(self, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(ud, d)

    def _build_revision(self, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
new file mode 100644
index 0000000000..bfca2f7bcf
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -0,0 +1,263 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' clearcase implementation
5
6The clearcase fetcher is used to retrieve files from a ClearCase repository.
7
8Usage in the recipe:
9
10 SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
11 SRCREV = "EXAMPLE_CLEARCASE_TAG"
12 PV = "${@d.getVar("SRCREV").replace("/", "+")}"
13
14The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
15
16Supported SRC_URI options are:
17
18- vob
19 (required) The name of the clearcase VOB (with prepending "/")
20
21- module
22 The module in the selected VOB (with prepending "/")
23
24 The module and vob parameters are combined to create
25 the following load rule in the view config spec:
26 load <vob><module>
27
28- proto
29 http or https
30
31Related variables:
32
33 CCASE_CUSTOM_CONFIG_SPEC
34 Write a config spec to this variable in your recipe to use it instead
35 of the default config spec generated by this fetcher.
36 Please note that the SRCREV loses its functionality if you specify
37 this variable. SRCREV is still used to label the archive after a fetch,
38 but it doesn't define what's fetched.
39
40User credentials:
41 cleartool:
42 The login of cleartool is handled by the system. No special steps needed.
43
44 rcleartool:
45 In order to use rcleartool with authenticated users an `rcleartool login` is
46 necessary before using the fetcher.
47"""
48# Copyright (C) 2014 Siemens AG
49#
50# This program is free software; you can redistribute it and/or modify
51# it under the terms of the GNU General Public License version 2 as
52# published by the Free Software Foundation.
53#
54# This program is distributed in the hope that it will be useful,
55# but WITHOUT ANY WARRANTY; without even the implied warranty of
56# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
57# GNU General Public License for more details.
58#
59# You should have received a copy of the GNU General Public License along
60# with this program; if not, write to the Free Software Foundation, Inc.,
61# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
62#
63
64import os
65import sys
66import shutil
67import bb
68from bb import data
69from bb.fetch2 import FetchMethod
70from bb.fetch2 import FetchError
71from bb.fetch2 import runfetchcmd
72from bb.fetch2 import logger
73from distutils import spawn
74
class ClearCase(FetchMethod):
    """Class to fetch urls via 'clearcase'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with Clearcase.
        """
        return ud.type in ['ccrc']

    def debug(self, msg):
        # All fetcher-internal tracing goes through one debug channel.
        logger.debug(1, "ClearCase: %s", msg)

    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            # Fixed: previously raised fetch2.ParameterError with the
            # undefined name 'fetch2'.
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            # Fixed: MissingParameterError is not imported in this module;
            # reference it through bb.fetch2.
            raise bb.fetch2.MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        # Prefer an explicit FETCHCMD_ccrc, else whichever client is installed.
        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if d.getVar("SRCREV", True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        # The raw (unexpanded) SRCREV is used as the view/tarball label.
        ud.label = d.getVar("SRCREV", False)
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % (ud.vob.replace("/", ""),
                                               ud.module.replace("/", "."),
                                               ud.label.replace("/", "."))

        # Fixed: d.getVar("DATETIME", d, True) passed the datastore itself as
        # the expand flag; only the expand flag is wanted.
        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", True))
        ud.csname = "%s-config-spec" % (ud.identifier)
        ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _build_ccase_command(self, ud, command):
        """
        Build up a commandline based on ud
        command is: mkview, setcs, rmview
        """
        options = []

        if "rcleartool" in ud.basecmd:
            options.append("-server %s" % ud.server)

        basecmd = "%s %s" % (ud.basecmd, command)

        # Fixed: commands were compared with "is", which relies on CPython
        # string interning and is not a reliable equality test.
        if command == 'mkview':
            if not "rcleartool" in ud.basecmd:
                # Cleartool needs a -snapshot view
                options.append("-snapshot")
            options.append("-tag %s" % ud.viewname)
            options.append(ud.viewdir)

        elif command == 'rmview':
            options.append("-force")
            options.append("%s" % ud.viewdir)

        elif command == 'setcs':
            options.append("-overwrite")
            options.append(ud.configspecfile)

        else:
            raise FetchError("Invalid ccase command %s" % command)

        ccasecmd = "%s %s" % (basecmd, " ".join(options))
        self.debug("ccasecmd = %s" % ccasecmd)
        return ccasecmd

    def _write_configspec(self, ud, d):
        """
        Create config spec file (ud.configspecfile) for ccase view
        """
        config_spec = ""
        # Fixed: the datastore was passed as the expand flag; use True.
        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
        if custom_config_spec is not None:
            for line in custom_config_spec.split("\\n"):
                config_spec += line+"\n"
            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
        else:
            config_spec += "element * CHECKEDOUT\n"
            config_spec += "element * %s\n" % ud.label
            config_spec += "load %s%s\n" % (ud.vob, ud.module)

        logger.info("Using config spec: \n%s" % config_spec)

        with open(ud.configspecfile, 'w') as f:
            f.write(config_spec)

    def _remove_view(self, ud, d):
        # Tear down the snapshot view created by download(), if present.
        if os.path.exists(ud.viewdir):
            os.chdir(ud.ccasedir)
            cmd = self._build_ccase_command(ud, 'rmview')
            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
            bb.fetch2.check_network_access(d, cmd, ud.url)
            output = runfetchcmd(cmd, d)
            logger.info("rmview output: %s", output)

    def need_update(self, ud, d):
        # LATEST labels are moving targets: always refetch, and timestamp the
        # identifier so each fetch produces a distinct tarball.
        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
            # Fixed: stray datastore argument in the getVar call.
            ud.identifier += "-%s" % d.getVar("DATETIME", True)
            return True
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        return True

    def sortable_revision(self, ud, d, name):
        return False, ud.identifier

    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        os.chdir(ud.viewdir)
        cmd = self._build_ccase_command(ud, 'setcs')
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d)

    def clean(self, ud, d):
        self._remove_view(ud, d)
        bb.utils.remove(ud.configspecfile)
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 0000000000..d27d96f68c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,171 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import logging
31import bb
32from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
33from bb.fetch2 import runfetchcmd
34
class Cvs(FetchMethod):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def urldata_init(self, ud, d):
        """Initialise CVS-specific url data: module (required), tag, date and
        the local tarball name derived from them."""
        if not "module" in ud.parm:
            raise MissingParameterError("module", ud.url)
        ud.module = ud.parm["module"]

        ud.tag = ud.parm.get('tag', "")

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            # A tag already pins the checkout; a date would only force
            # needless re-fetches, so drop it.
            ud.date = ""

        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

    def need_update(self, ud, d):
        # A floating date always needs refetching; otherwise only fetch
        # when the local tarball is missing.
        if (ud.date == "now"):
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def download(self, ud, d):
        """Fetch url: check out (or update) the module under CVSDIR/PN and
        tar the result into ud.localpath."""

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        # Build the CVSROOT; "dir" means a plain local path, anything else
        # is a ":method:" root, optionally routed through a CVS proxy.
        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + ud.url)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup = [moddir])

        if not os.access(moddir, os.R_OK):
            # Fix: corrected "sucessful" -> "successful" in the error message.
            raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # Strip CVS metadata directories from the tarball by default
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean CVS Files and tarballs """

        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)

        bb.utils.remove(pkgdir, True)
        bb.utils.remove(ud.localpath)
171
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..5573f0a81e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,358 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6git fetcher support the SRC_URI with format of:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports multiple branch fetching, with branches
15 separated by commas. In multiple branches case, the name option
16 must have the same number of names to match the branches, which is
17 used to specify the SRC_REV for the branch
18 e.g:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 and current revision may disappear from upstream repo. This option will
33 remind fetcher to preserve local cache carefully for future use.
34 The default value is "0", set rebaseable=1 for rebaseable git repo.
35
36- nocheckout
37 Don't checkout source code when unpacking. set this option for the recipe
38 who has its own routine to checkout code.
39 The default is "0", set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't checkout the source code
43 when unpacking. Set this option for the recipe who has its own routine to
44 checkout code and tracking branch requirements.
45 The default is "0", set bareclone=1 if needed.
46
47- nobranch
48 Don't check the SHA validation for branch. set this option for the recipe
49 referring to commit which is valid in tag instead of branch.
50 The default is "0", set nobranch=1 if needed.
51
52"""
53
54#Copyright (C) 2005 Richard Purdie
55#
56# This program is free software; you can redistribute it and/or modify
57# it under the terms of the GNU General Public License version 2 as
58# published by the Free Software Foundation.
59#
60# This program is distributed in the hope that it will be useful,
61# but WITHOUT ANY WARRANTY; without even the implied warranty of
62# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63# GNU General Public License for more details.
64#
65# You should have received a copy of the GNU General Public License along
66# with this program; if not, write to the Free Software Foundation, Inc.,
67# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
68
69import os
70import bb
71from bb import data
72from bb.fetch2 import FetchMethod
73from bb.fetch2 import runfetchcmd
74from bb.fetch2 import logger
75
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def supports_checksum(self, urldata):
        # A git checkout is pinned by revision, not by file checksum.
        return False

    def urldata_init(self, ud, d):
        """
        init git specific variable within url data
        so that the git method like latest_revision() can work
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "git"

        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"

        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
            ud.nocheckout = 1

        ud.unresolvedrev = {}
        branches = ud.parm.get("branch", "master").split(',')
        if len(branches) != len(ud.names):
            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
        ud.branches = {}
        for name in ud.names:
            branch = branches[ud.names.index(name)]
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

        ud.setup_revisons(d)

        for name in ud.names:
            # Ensure anything that doesn't look like a SHA-1 revision
            # (40 lowercase hex chars) is resolved into one via ls-remote.
            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
                if ud.revisions[name]:
                    ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
        # contains the revision
        if ud.rebaseable:
            for name in ud.names:
                gitsrcname = gitsrcname + '_' + ud.revisions[name]
        ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
        ud.clonedir = os.path.join(gitdir, gitsrcname)

        ud.localfile = ud.clonedir

    def localpath(self, ud, d):
        return ud.clonedir

    def need_update(self, ud, d):
        # Update when the clone is missing, any wanted revision is absent,
        # or a mirror tarball is requested but not yet generated.
        if not os.path.exists(ud.clonedir):
            return True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                return True
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.clonedir):
            return False
        return True

    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.clonedir)
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
        return True

    def clean(self, ud, d):
        """ clean the git directory """

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")

    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name):
        # Query the clone in the current working directory for the wanted
        # revision; with nobranch only the object's existence is checked.
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name])
        else:
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name], ud.branches[name])
        try:
            output = runfetchcmd(cmd, d, quiet=True)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            # Fix: corrected "then" -> "than" in the error message.
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]

    def _lsremote(self, ud, d, search):
        """
        Run git ls-remote with the specified search string
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
        return output

    def _latest_revision(self, ud, d, name):
        """
        Compute the HEAD revision for the url
        """
        if ud.unresolvedrev[name][:5] == "refs/":
            search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        else:
            search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def checkstatus(self, ud, d):
        # Lightweight reachability check for the remote repository.
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except bb.fetch2.FetchError:
            # Fix: bare FetchError is never imported into this module, so the
            # original except clause raised NameError instead of catching the
            # failure; use the fully qualified name.
            return False
diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 0000000000..0f37897450
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,76 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git annex implementation
5"""
6
7# Copyright (C) 2014 Otavio Salvador
8# Copyright (C) 2014 O.S. Systems Software LTDA.
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch2.git import Git
27from bb.fetch2 import runfetchcmd
28from bb.fetch2 import logger
29
class GitANNEX(Git):
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitannex']

    def uses_annex(self, ud, d):
        # Probe for the "git-annex" branch; its presence marks an annex repo.
        # Fix: this module imports bb.fetch2, not the legacy bb.fetch, so the
        # exception must be bb.fetch2.FetchError (the old name raised
        # AttributeError instead of catching the probe failure).
        for name in ud.names:
            try:
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
                return True
            except bb.fetch2.FetchError:
                pass

        return False

    def update_annex(self, ud, d):
        """Fetch all annexed file content into the clone; returns False if
        the annex get fails."""
        try:
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
        except bb.fetch2.FetchError:
            return False
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)

        return True

    def download(self, ud, d):
        Git.download(self, ud, d)

        # After the plain git fetch, pull the annexed content as well.
        os.chdir(ud.clonedir)
        annex = self.uses_annex(ud, d)
        if annex:
            self.update_annex(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        # Sync and materialise annexed files in the unpacked checkout; the
        # annex store is made writable so the build can use the files.
        os.chdir(ud.destdir)
        try:
            runfetchcmd("%s annex sync" % (ud.basecmd), d)
        except bb.fetch2.FetchError:
            pass

        annex = self.uses_annex(ud, d)
        if annex:
            runfetchcmd("%s annex get" % (ud.basecmd), d)
            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 0000000000..c125cff54b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,136 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git submodules implementation
5
6Inherits from and extends the Git fetcher to retrieve submodules of a git repository
7after cloning.
8
9SRC_URI = "gitsm://<see Git fetcher for syntax>"
10
11See the Git fetcher, git://, for usage documentation.
12
13NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
14
15"""
16
17# Copyright (C) 2013 Richard Purdie
18#
19# This program is free software; you can redistribute it and/or modify
20# it under the terms of the GNU General Public License version 2 as
21# published by the Free Software Foundation.
22#
23# This program is distributed in the hope that it will be useful,
24# but WITHOUT ANY WARRANTY; without even the implied warranty of
25# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26# GNU General Public License for more details.
27#
28# You should have received a copy of the GNU General Public License along
29# with this program; if not, write to the Free Software Foundation, Inc.,
30# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
31
32import os
33import bb
34from bb import data
35from bb.fetch2.git import Git
36from bb.fetch2 import runfetchcmd
37from bb.fetch2 import logger
38
class GitSM(Git):
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitsm']

    def uses_submodules(self, ud, d):
        # A revision that carries a .gitmodules file uses submodules.
        # Fix: this module imports bb.fetch2, not the legacy bb.fetch, so the
        # exception must be bb.fetch2.FetchError (the old name raised
        # AttributeError instead of catching the probe failure).
        for name in ud.names:
            try:
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
                return True
            except bb.fetch2.FetchError:
                pass
        return False

    def _set_relative_paths(self, repopath):
        """
        Fix submodule paths to be relative instead of absolute,
        so that when we move the repo it doesn't break
        (In Git 1.7.10+ this is done automatically)
        """
        submodules = []
        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
            for line in f.readlines():
                if line.startswith('[submodule'):
                    submodules.append(line.split('"')[1])

        for module in submodules:
            # Rewrite the submodule's .git "gitdir:" pointer if absolute
            repo_conf = os.path.join(repopath, module, '.git')
            if os.path.exists(repo_conf):
                with open(repo_conf, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.startswith('gitdir:'):
                        oldpath = line.split(': ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
                            lines[i] = 'gitdir: %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf, 'w') as f:
                        for line in lines:
                            f.write(line)

            # Rewrite the module config's "worktree" entry if absolute
            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
            if os.path.exists(repo_conf2):
                with open(repo_conf2, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.lstrip().startswith('worktree = '):
                        oldpath = line.split(' = ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 3) + module
                            lines[i] = '\tworktree = %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf2, 'w') as f:
                        for line in lines:
                            f.write(line)

    def update_submodules(self, ud, d):
        # We have to convert bare -> full repo, do the submodule bit, then convert back
        tmpclonedir = ud.clonedir + ".tmp"
        gitdir = tmpclonedir + os.sep + ".git"
        bb.utils.remove(tmpclonedir, True)
        os.mkdir(tmpclonedir)
        os.rename(ud.clonedir, gitdir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        os.chdir(tmpclonedir)
        runfetchcmd(ud.basecmd + " reset --hard", d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
        self._set_relative_paths(tmpclonedir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
        os.rename(gitdir, ud.clonedir,)
        bb.utils.remove(tmpclonedir, True)

    def download(self, ud, d):
        Git.download(self, ud, d)

        # After the plain git fetch, fetch submodule content if used.
        os.chdir(ud.clonedir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            self.update_submodules(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        # Re-wire the submodules in the unpacked tree from the local clone's
        # module store, so no network access is needed here.
        os.chdir(ud.destdir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
            runfetchcmd(ud.basecmd + " submodule init", d)
            runfetchcmd(ud.basecmd + " submodule update", d)
136
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 0000000000..81592f6e04
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,193 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import logging
30import bb
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
class Hg(FetchMethod):
    """Class to fetch from mercurial repositories"""
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with mercurial.
        """
        return ud.type in ['hg']

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        # "tip" floats, so always refetch it; otherwise only fetch when the
        # local tarball is missing.
        revTag = ud.parm.get('rev', 'tip')
        if revTag == "tip":
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
        if proto == "file":
            host = "/"
            ud.host = "localhost"

        if not ud.user:
            hgroot = host + ud.path
        else:
            if ud.pswd:
                hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
            else:
                hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

        # Don't specify revision for the fetch; clone the entire repo.
        # This avoids an issue if the specified revision is a tag, because
        # the tag actually exists in the specified revision + 1, so it won't
        # be available when used in any successive commands.
        if ud.revision and command != "fetch":
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

        return cmd

    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't checkout the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            # Fix: exclude pattern was misspelled '.hgrags', which matched
            # nothing; mercurial's tags file is '.hgtags'.
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

    def _latest_revision(self, ud, d, name):
        """
        Compute tip revision for the url
        """
        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
        return output.strip()

    def _build_revision(self, ud, d, name):
        return ud.revision

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..0785236a6b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,128 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import urllib
30import bb
31import bb.utils
32from bb import data
33from bb.fetch2 import FetchMethod, FetchError
34from bb.fetch2 import logger
35
class Local(FetchMethod):
    def supports(self, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
        return urldata.type in ['file']

    def urldata_init(self, ud, d):
        """Decode the url into the path fields used by the other methods."""
        # We don't set localfile as for this fetcher the file is already local!
        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
        ud.basename = os.path.basename(ud.decodedurl)
        ud.basepath = ud.decodedurl
        return

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        return self.localpaths(urldata, d)[-1]

    def localpaths(self, urldata, d):
        """
        Return the list of locations searched for the url, in search order.

        The last entry is the best candidate (the one localpath() returns);
        the earlier entries are kept so callers can report everywhere that
        was tried.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            # Absolute path: nothing to search.
            return [path]
        filespath = data.getVar('FILESPATH', d, True)
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first
            # directory in FILESPATH that exists.
            # BUGFIX: guard against FILESPATH being unset - bb.utils.which()
            # cannot take None, and there is nothing to search in that case.
            if filespath:
                newpath, hist = bb.utils.which(filespath, ".", history=True)
                searched.extend(hist)
                logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            # Fall back to a (not yet existing) entry under DL_DIR.
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched

    def need_update(self, ud, d):
        """No fetch needed for globs or files that already exist locally."""
        if ud.url.find("*") != -1:
            return False
        if os.path.exists(ud.localpath):
            return False
        return True

    def download(self, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
            locations = []
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                locations = filespath.split(":")
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                locations.append(filesdir)
            locations.append(d.getVar("DL_DIR", True))

            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
            raise FetchError(msg)

        return True

    def checkstatus(self, urldata, d):
        """
        Check the status of the url
        """
        if urldata.localpath.find("*") != -1:
            logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
            return True
        if os.path.exists(urldata.localpath):
            return True
        return False

    def clean(self, urldata, d):
        """Nothing to clean: the source file is not ours to delete."""
        return
128
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 0000000000..3d8779682f
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,135 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import logging
12import bb
13from bb import data
14from bb.fetch2 import FetchMethod
15from bb.fetch2 import FetchError
16from bb.fetch2 import MissingParameterError
17from bb.fetch2 import runfetchcmd
18
class Osc(FetchMethod):
    """Class to fetch a module or modules from Opensuse build server
    repositories."""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with osc.
        """
        return ud.type in ['osc']

    def urldata_init(self, ud, d):
        """Set up checkout paths, revision and the local tarball name."""
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.module = ud.parm["module"]

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            # The helper may return True to mean "no usable revision".
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)

    def _buildosccommand(self, ud, d, command):
        """
        Build up an osc commandline based on ud
        command is "fetch" or "update"; anything else raises FetchError.
        """

        basecmd = data.expand('${FETCHCMD_osc}', d)

        options = []

        config = "-c %s" % self.generate_config(ud, d)

        if ud.revision:
            options.append("-r %s" % ud.revision)

        coroot = self._strip_leading_slashes(ud.path)

        if command == "fetch":
            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
        elif command == "update":
            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
        else:
            raise FetchError("Invalid osc command %s" % command, ud.url)

        return osccmd

    def download(self, ud, d):
        """
        Fetch url
        """
        # BUGFIX: 'logger' is used below but was never imported by this
        # module, so every download raised NameError; import it explicitly.
        from bb.fetch2 import logger

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update "+ ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", oscupdatecmd)
            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
            runfetchcmd(oscupdatecmd, d)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
            runfetchcmd(oscfetchcmd, d)

        # NOTE(review): string concatenation looks intentional here - ud.path
        # presumably starts with '/', and os.path.join(ud.pkgdir, ud.path)
        # would then discard pkgdir. Confirm against the url parser.
        os.chdir(os.path.join(ud.pkgdir + ud.path))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return False

    def generate_config(self, ud, d):
        """
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

        # 'with' guarantees the config file is flushed and closed even if a
        # write fails part-way through.
        with open(config_path, 'w') as f:
            f.write("[general]\n")
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
            # NOTE(review): hard-coded, site-specific mirror URL - confirm it
            # is still appropriate for this deployment.
            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)
            f.write("user = %s\n" % ud.parm["user"])
            f.write("pass = %s\n" % ud.parm["pswd"])

        return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 0000000000..d079a33c62
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,187 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from future_builtins import zip
29import os
30import subprocess
31import logging
32import bb
33from bb import data
34from bb.fetch2 import FetchMethod
35from bb.fetch2 import FetchError
36from bb.fetch2 import logger
37from bb.fetch2 import runfetchcmd
38
class Perforce(FetchMethod):
    """Fetch method for Perforce (p4) depots."""

    def supports(self, ud, d):
        """Check to see if a given url can be fetched with perforce."""
        return ud.type in ['p4']

    def doparse(url, d):
        """
        Split a p4 url into (host, path, user, pswd, parm).

        Credentials may be embedded as user:pswd:host:port before an '@';
        otherwise host/port come from P4PORT. Trailing ';key=value' pairs
        are collected into parm, and parm["cset"] is always filled in via
        getcset().
        """
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@")
        if delim != -1:
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = d.getVar('P4PORT').split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            keys = []
            values = []
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host, path, user, pswd, parm
    doparse = staticmethod(doparse)

    def getcset(d, depot, host, user, pswd, parm):
        """
        Return the changeset to fetch: parm["cset"] when given, otherwise
        the newest changeset reported by 'p4 changes -m 1' for the depot
        path (optionally pinned by revision/label/P4DATE). Returns -1 when
        p4 reports nothing.
        """
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = d.getVar("P4DATE", True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        return cset.split(' ')[1]
    getcset = staticmethod(getcset)

    def urldata_init(self, ud, d):
        """Derive ud.localfile from the parsed url and resolved changeset."""
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        base_path = path.replace('/...', '')
        base_path = self._strip_leading_slashes(base_path)

        if "label" in parm:
            version = parm["label"]
        else:
            version = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)

    def download(self, ud, d):
        """
        Fetch urls
        """
        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        # Build the global p4 options (credentials and server).
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
        mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
        tmpfile, errors = bb.process.run(mktemp)
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

        # Pin the depot spec to a label or an explicit changeset.
        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

        count = 0

        for fileline in p4file:
            # Renamed from 'file'/'list' which shadowed the builtins.
            fields = fileline.split()

            if fields[2] == "delete":
                continue

            dest = fields[0][len(path)+1:]
            # NOTE(review): assumes every entry contains '#<rev>'; if '#'
            # were absent, dest[:where] would drop the last character.
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
            count = count + 1

        if count == 0:
            # BUGFIX: logger.error() was called with no arguments, which
            # itself raises a TypeError and masked the real failure.
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..21678eb7d9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake "Fetch" repo (git) implementation
5
6"""
7
8# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
9#
10# Based on git.py which is:
11#Copyright (C) 2005 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import bb
28from bb import data
29from bb.fetch2 import FetchMethod
30from bb.fetch2 import runfetchcmd
31
class Repo(FetchMethod):
    """Class to fetch a module or modules from repo (git) repositories"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """
        return ud.type in ["repo"]

    def urldata_init(self, ud, d):
        """
        We don't care about the git rev of the manifests repository, but
        we do care about the manifest to use. The default is "default".
        We also care about the branch or tag to be used. The default is
        "master".
        """

        ud.proto = ud.parm.get('protocol', 'git')
        ud.branch = ud.parm.get('branch', 'master')
        ud.manifest = ud.parm.get('manifest', 'default.xml')
        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)

    def download(self, ud, d):
        """Fetch url"""
        # BUGFIX: 'logger' is not imported at module level in this file, so
        # the cache-hit path below raised NameError; import it locally.
        from bb.fetch2 import logger

        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:
            username = ud.user + "@"
        else:
            username = ""

        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        # Only init once; subsequent fetches just re-sync.
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

    def supports_srcrev(self):
        return False

    def _build_revision(self, ud, d):
        return ud.manifest

    def _want_sortable_revision(self, ud, d):
        return False
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..8ea4ef2ff3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake SFTP Fetch implementation
5
6Class for fetching files via SFTP. It tries to adhere to the (now
7expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
8Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
9(SSH)" (SECSH URI).
10
11It uses SFTP (as to adhere to the SECSH URI specification). It only
12supports key based authentication, not password. This class, unlike
13the SSH fetcher, does not support fetching a directory tree from the
14remote.
15
16 http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
17 https://www.iana.org/assignments/uri-schemes/prov/sftp
18 https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
19
Please note that '/' is used as the host/path separator, and not ":"
as you may be used to from the scp/sftp commands. You can use a
~ (tilde) to specify a path relative to your home directory.
(The /~user/ syntax, for specifying a path relative to another
user's home directory, is not supported.) Note that the tilde must
still follow the host/path separator ("/"). See examples below.
26
27Example SRC_URIs:
28
29SRC_URI = "sftp://host.example.com/dir/path.file.txt"
30
31A path relative to your home directory.
32
33SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
34
You can also specify a username (specifying a password in the
36URI is not supported, use SSH keys to authenticate):
37
38SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
39
40"""
41
42# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
43#
44# Based in part on bb.fetch2.wget:
45# Copyright (C) 2003, 2004 Chris Larson
46#
47# This program is free software; you can redistribute it and/or modify
48# it under the terms of the GNU General Public License version 2 as
49# published by the Free Software Foundation.
50#
51# This program is distributed in the hope that it will be useful,
52# but WITHOUT ANY WARRANTY; without even the implied warranty of
53# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
54# GNU General Public License for more details.
55#
56# You should have received a copy of the GNU General Public License along
57# with this program; if not, write to the Free Software Foundation, Inc.,
58# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
59#
60# Based on functions from the base bb module, Copyright 2003 Holger Schurig
61
62import os
63import bb
64import urllib
65import commands
66from bb import data
67from bb.fetch2 import URI
68from bb.fetch2 import FetchMethod
69from bb.fetch2 import runfetchcmd
70
71
class SFTP(FetchMethod):
    """Fetch urls via the 'sftp' command (key-based authentication only)."""

    def supports(self, ud, d):
        """True when the url uses the sftp scheme."""
        return ud.type in ['sftp']

    def recommends_checksum(self, urldata):
        """Remote file fetches should be checksummed."""
        return True

    def urldata_init(self, ud, d):
        """Reject misused git urls and work out the local file name."""
        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a " +
                "git repository using ssh, you need to use the " +
                "git:// prefix with protocol=ssh", ud.url)

        # 'downloadfilename' overrides the name taken from the url path.
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

    def download(self, ud, d):
        """Copy the remote file into DL_DIR with the sftp command."""
        urlo = URI(ud.url)
        basecmd = 'sftp -oPasswordAuthentication=no'

        # A port in the URI becomes an explicit -P option; clear it so it
        # does not leak into the remote spec built below.
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port
            urlo.port = None

        lpath = os.path.join(data.getVar('DL_DIR', d, True), ud.localfile)

        user = urlo.userinfo + '@' if urlo.userinfo else ''

        path = urlo.path
        # SECSH-URI tilde syntax: a path relative to the remote home dir.
        if path[:3] == '/~/':
            path = path[3:]

        remote = '%s%s:%s' % (user, urlo.hostname, path)

        cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
                               commands.mkarg(lpath))

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..4ae979472c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13 Please note that '/' is used as host, path separator not ':' as you may
14 be used to, also '~' can be used to specify user HOME, but again after '/'
15
16 Example SRC_URI:
17 SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
18 SRC_URI = "ssh://user@host.example.com/~/file.txt"
19'''
20
21# Copyright (C) 2006 OpenedHand Ltd.
22#
23#
24# Based in part on svk.py:
25# Copyright (C) 2006 Holger Hans Peter Freyther
26# Based on svn.py:
27# Copyright (C) 2003, 2004 Chris Larson
28# Based on functions from the base bb module:
29# Copyright 2003 Holger Schurig
30#
31#
32# This program is free software; you can redistribute it and/or modify
33# it under the terms of the GNU General Public License version 2 as
34# published by the Free Software Foundation.
35#
36# This program is distributed in the hope that it will be useful,
37# but WITHOUT ANY WARRANTY; without even the implied warranty of
38# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
39# GNU General Public License for more details.
40#
41# You should have received a copy of the GNU General Public License along
42# with this program; if not, write to the Free Software Foundation, Inc.,
43# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44
45import re, os
46from bb import data
47from bb.fetch2 import FetchMethod
48from bb.fetch2 import FetchError
49from bb.fetch2 import logger
50from bb.fetch2 import runfetchcmd
51
52
# Pre-compiled matcher for the SECSH-draft ssh:// URI form; the named groups
# below are read by the SSH class to build the scp command line.
__pattern__ = re.compile(r'''
    \s* # Skip leading whitespace
    ssh:// # scheme
    ( # Optional username/password block
        (?P<user>\S+) # username
        (:(?P<pass>\S+))? # colon followed by the password (optional)
    )?
    (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
    @
    (?P<host>\S+?) # non-greedy match of the host
    (:(?P<port>[0-9]+))? # colon followed by the port (optional)
    /
    (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
                    # and may include the use of '~' to reference the remote home
                    # directory
    (?P<sparam>(;[^;]+)*)? # parameters block (optional)
    $
''', re.VERBOSE)
71
class SSH(FetchMethod):
    '''Class to fetch a module or modules via Secure Shell'''

    def supports(self, urldata, d):
        """True when the url matches the module-level ssh:// pattern."""
        return __pattern__.match(urldata.url) != None

    def supports_checksum(self, urldata):
        # The file is copied straight into DL_DIR; no checksum support.
        return False

    def urldata_init(self, urldata, d):
        """Validate the url and compute the local download path."""
        if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
            # BUGFIX: this module never imports 'bb', so the reference to
            # bb.fetch2.ParameterError raised NameError instead of the
            # intended error; import it locally.
            import bb.fetch2
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a git " +
                "repository using ssh, you need to use " +
                "git:// prefix with protocol=ssh", urldata.url)
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, urldata, d):
        """Copy the remote path into DL_DIR with scp."""
        # BUGFIX: 'bb' is not imported at module level in this file; needed
        # for the check_network_access() call below.
        import bb.fetch2

        dldir = d.getVar('DL_DIR', True)

        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        if port:
            portarg = '-P %s' % port
        else:
            portarg = ''

        # Build the scp source spec: [user[:password]@]host:path
        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path

        import commands
        cmd = 'scp -B -r %s %s %s/' % (
            portarg,
            commands.mkarg(fr),
            commands.mkarg(dldir)
        )

        bb.fetch2.check_network_access(d, cmd, urldata.url)

        runfetchcmd(cmd, d)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 0000000000..1733c2beb6
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,192 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import logging
29import bb
30import re
31from bb import data
32from bb.fetch2 import FetchMethod
33from bb.fetch2 import FetchError
34from bb.fetch2 import MissingParameterError
35from bb.fetch2 import runfetchcmd
36from bb.fetch2 import logger
37
class Svn(FetchMethod):
    """Class to fetch a module or modules from svn repositories"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with svn.
        """
        return ud.type in ['svn']

    def urldata_init(self, ud, d):
        """
        init svn specific variables within url data
        """
        if not "module" in ud.parm:
            raise MissingParameterError('module', ud.url)

        ud.basecmd = d.getVar('FETCHCMD_svn', True)

        ud.module = ud.parm["module"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']

        ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildsvncommand(self, ud, d, command):
        """
        Build up an svn commandline based on ud
        command is "fetch", "update", "info" or "log1"
        """

        proto = ud.parm.get('protocol', 'svn')

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in ud.parm:
            svn_rsh = ud.parm["rsh"]

        svnroot = ud.host + ud.path

        options = []

        options.append("--no-auth-cache")

        if ud.user:
            options.append("--username %s" % ud.user)

        if ud.pswd:
            options.append("--password %s" % ud.pswd)

        if command == "info":
            svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
        elif command == "log1":
            svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
        else:
            suffix = ""
            if ud.revision:
                options.append("-r %s" % ud.revision)
                suffix = "@%s" % (ud.revision)

            if command == "fetch":
                transportuser = ud.parm.get("transportuser", "")
                svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
            elif command == "update":
                svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
            else:
                raise FetchError("Invalid svn command %s" % command, ud.url)

        if svn_rsh:
            # NOTE(review): env var is 'svn_RSH' here; subversion documents
            # SVN_SSH for the tunnel command - confirm which one is intended.
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

        return svncmd

    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            # We need to attempt to run svn upgrade first in case its an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d)
            except FetchError:
                # Best effort: older svn clients have no 'upgrade' subcommand.
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean SVN specific files and dirs """

        bb.utils.remove(ud.localpath)
        bb.utils.remove(ud.moddir, True)

    def supports_srcrev(self):
        return True

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "svn:" + ud.moddir

    def _latest_revision(self, ud, d, name):
        """
        Return the latest upstream revision number.

        BUGFIX: raise FetchError with the offending output instead of a bare
        IndexError/AttributeError when 'svn log' output has an unexpected shape.
        """
        bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"))

        output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)

        # skip the first line, as per output of svn log
        # then we expect the revision on the 2nd line
        lines = output.splitlines()
        if len(lines) < 2:
            raise FetchError("Unexpected output from 'svn log': %r" % output, ud.url)
        match = re.search('^r([0-9]*)', lines[1])
        if not match:
            raise FetchError("Could not parse a revision from 'svn log' output: %r" % output, ud.url)

        return match.group(1)

    def sortable_revision(self, ud, d, name):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(ud, d)

    def _build_revision(self, ud, d):
        return ud.revision
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 0000000000..0456490368
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,106 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import logging
30import bb
31import urllib
32from bb import data
33from bb.fetch2 import FetchMethod
34from bb.fetch2 import FetchError
35from bb.fetch2 import logger
36from bb.fetch2 import runfetchcmd
37
class Wget(FetchMethod):
    """Fetcher backend that retrieves URLs via the 'wget' command."""

    def supports(self, ud, d):
        """wget handles plain http/https/ftp URLs."""
        return ud.type in ['http', 'https', 'ftp']

    def recommends_checksum(self, urldata):
        # Plain file downloads carry no revision info, so checksums
        # are the only way to verify what was fetched.
        return True

    def urldata_init(self, ud, d):
        """Work out the local file name and the base wget command."""
        if ud.parm.get('protocol') == 'git':
            raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        # An explicit downloadfilename parameter overrides the name
        # derived from the URL path.
        ud.basename = ud.parm.get('downloadfilename', os.path.basename(ud.path))

        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    def _runwget(self, ud, d, command, quiet):
        """Run a wget command line, honouring BitBake's network policy."""
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command, d, quiet)

    def download(self, ud, d):
        """Fetch urls"""
        fetchcmd = self.basecmd

        if 'downloadfilename' in ud.parm:
            # Write straight to the renamed target inside DL_DIR.
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            fetchcmd += " -O " + dldir + os.sep + ud.localfile

        uri = ud.url.split(";")[0]
        # A file already on disk is assumed to be a partial download:
        # ask wget to continue (-c) it rather than start over.
        if os.path.exists(ud.localpath):
            template = " -c -P ${DL_DIR} '%s'"
        else:
            template = " -P ${DL_DIR} '%s'"
        fetchcmd += d.expand(template % uri)

        self._runwget(ud, d, fetchcmd, False)

        # wget can exit successfully without producing the file (this
        # used to happen when sourceforge redirected to a mirror page),
        # so verify the result on disk before declaring victory.
        if not os.path.exists(ud.localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        return True

    def checkstatus(self, ud, d):
        """Probe the URL with 'wget --spider' without downloading it."""
        uri = ud.url.split(";")[0]
        self._runwget(ud, d, self.basecmd + " --spider '%s'" % uri, True)
        return True