From c527fd1f14c27855a37f2e8ac5346ce8d940ced2 Mon Sep 17 00:00:00 2001
From: Tudor Florea
Date: Thu, 16 Oct 2014 03:05:19 +0200
Subject: initial commit for Enea Linux 4.0-140929

Migrated from the internal git server on the daisy-enea-point-release branch

Signed-off-by: Tudor Florea
---
 bitbake/lib/bb/fetch2/__init__.py | 1575 +++++++++++++++++++++++++++++++++++++
 bitbake/lib/bb/fetch2/bzr.py      |  143 ++++
 bitbake/lib/bb/fetch2/cvs.py      |  171 ++++
 bitbake/lib/bb/fetch2/git.py      |  355 +++++++++
 bitbake/lib/bb/fetch2/gitannex.py |   76 ++
 bitbake/lib/bb/fetch2/gitsm.py    |  126 +++
 bitbake/lib/bb/fetch2/hg.py       |  187 +++++
 bitbake/lib/bb/fetch2/local.py    |  116 +++
 bitbake/lib/bb/fetch2/osc.py      |  135 ++++
 bitbake/lib/bb/fetch2/perforce.py |  194 +++++
 bitbake/lib/bb/fetch2/repo.py     |   98 +++
 bitbake/lib/bb/fetch2/sftp.py     |  129 +++
 bitbake/lib/bb/fetch2/ssh.py      |  127 +++
 bitbake/lib/bb/fetch2/svn.py      |  191 +++++
 bitbake/lib/bb/fetch2/wget.py     |  106 +++
 15 files changed, 3729 insertions(+)
 create mode 100644 bitbake/lib/bb/fetch2/__init__.py
 create mode 100644 bitbake/lib/bb/fetch2/bzr.py
 create mode 100644 bitbake/lib/bb/fetch2/cvs.py
 create mode 100644 bitbake/lib/bb/fetch2/git.py
 create mode 100644 bitbake/lib/bb/fetch2/gitannex.py
 create mode 100644 bitbake/lib/bb/fetch2/gitsm.py
 create mode 100644 bitbake/lib/bb/fetch2/hg.py
 create mode 100644 bitbake/lib/bb/fetch2/local.py
 create mode 100644 bitbake/lib/bb/fetch2/osc.py
 create mode 100644 bitbake/lib/bb/fetch2/perforce.py
 create mode 100644 bitbake/lib/bb/fetch2/repo.py
 create mode 100644 bitbake/lib/bb/fetch2/sftp.py
 create mode 100644 bitbake/lib/bb/fetch2/ssh.py
 create mode 100644 bitbake/lib/bb/fetch2/svn.py
 create mode 100644 bitbake/lib/bb/fetch2/wget.py

diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 0000000000..5a03a0e46e
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1575 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2012 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
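The public entry point of this module is the Fetch class defined near the end of the file. A minimal, illustrative use from code that already holds a BitBake datastore d (the URL and values are made up, not taken from the commit):

    fetcher = bb.fetch2.Fetch(["http://example.com/foo-1.0.tar.gz"], d)
    fetcher.download()                   # tries PREMIRRORS, then upstream, then MIRRORS
    local_files = fetcher.localpaths()   # resolved local paths, typically under DL_DIR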
+# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +from __future__ import absolute_import +from __future__ import print_function +import os, re +import signal +import glob +import logging +import urllib +import urlparse +import operator +import bb.persist_data, bb.utils +import bb.checksum +from bb import data +import bb.process +import subprocess + +__version__ = "2" +_checksum_cache = bb.checksum.FileChecksumCache() + +logger = logging.getLogger("BitBake.Fetcher") + +class BBFetchException(Exception): + """Class all fetch exceptions inherit from""" + def __init__(self, message): + self.msg = message + Exception.__init__(self, message) + + def __str__(self): + return self.msg + +class MalformedUrl(BBFetchException): + """Exception raised when encountering an invalid url""" + def __init__(self, url): + msg = "The URL: '%s' is invalid and cannot be interpreted" % url + self.url = url + BBFetchException.__init__(self, msg) + self.args = (url,) + +class FetchError(BBFetchException): + """General fetcher exception when something happens incorrectly""" + def __init__(self, message, url = None): + if url: + msg = "Fetcher failure for URL: '%s'. %s" % (url, message) + else: + msg = "Fetcher failure: %s" % message + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class ChecksumError(FetchError): + """Exception when mismatched checksum encountered""" + def __init__(self, message, url = None, checksum = None): + self.checksum = checksum + FetchError.__init__(self, message, url) + +class NoChecksumError(FetchError): + """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set""" + +class UnpackError(BBFetchException): + """General fetcher exception when something happens incorrectly when unpacking""" + def __init__(self, message, url): + msg = "Unpack failure for URL: '%s'. %s" % (url, message) + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class NoMethodError(BBFetchException): + """Exception raised when there is no method to obtain a supplied url or set of urls""" + def __init__(self, url): + msg = "Could not find a fetcher which supports the URL: '%s'" % url + self.url = url + BBFetchException.__init__(self, msg) + self.args = (url,) + +class MissingParameterError(BBFetchException): + """Exception raised when a fetch method is missing a critical parameter in the url""" + def __init__(self, missing, url): + msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing) + self.url = url + self.missing = missing + BBFetchException.__init__(self, msg) + self.args = (missing, url) + +class ParameterError(BBFetchException): + """Exception raised when a url cannot be proccessed due to invalid parameters.""" + def __init__(self, message, url): + msg = "URL: '%s' has invalid parameters. 
%s" % (url, message) + self.url = url + BBFetchException.__init__(self, msg) + self.args = (message, url) + +class NetworkAccess(BBFetchException): + """Exception raised when network access is disabled but it is required.""" + def __init__(self, url, cmd): + msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url) + self.url = url + self.cmd = cmd + BBFetchException.__init__(self, msg) + self.args = (url, cmd) + +class NonLocalMethod(Exception): + def __init__(self): + Exception.__init__(self) + + +class URI(object): + """ + A class representing a generic URI, with methods for + accessing the URI components, and stringifies to the + URI. + + It is constructed by calling it with a URI, or setting + the attributes manually: + + uri = URI("http://example.com/") + + uri = URI() + uri.scheme = 'http' + uri.hostname = 'example.com' + uri.path = '/' + + It has the following attributes: + + * scheme (read/write) + * userinfo (authentication information) (read/write) + * username (read/write) + * password (read/write) + + Note, password is deprecated as of RFC 3986. + + * hostname (read/write) + * port (read/write) + * hostport (read only) + "hostname:port", if both are set, otherwise just "hostname" + * path (read/write) + * path_quoted (read/write) + A URI quoted version of path + * params (dict) (read/write) + * query (dict) (read/write) + * relative (bool) (read only) + True if this is a "relative URI", (e.g. file:foo.diff) + + It stringifies to the URI itself. + + Some notes about relative URIs: while it's specified that + a URI beginning with :// should either be directly + followed by a hostname or a /, the old URI handling of the + fetch2 library did not comform to this. Therefore, this URI + class has some kludges to make sure that URIs are parsed in + a way comforming to bitbake's current usage. This URI class + supports the following: + + file:relative/path.diff (IETF compliant) + git:relative/path.git (IETF compliant) + git:///absolute/path.git (IETF compliant) + file:///absolute/path.diff (IETF compliant) + + file://relative/path.diff (not IETF compliant) + + But it does not support the following: + + file://hostname/absolute/path.diff (would be IETF compliant) + + Note that the last case only applies to a list of + "whitelisted" schemes (currently only file://), that requires + its URIs to not have a network location. + """ + + _relative_schemes = ['file', 'git'] + _netloc_forbidden = ['file'] + + def __init__(self, uri=None): + self.scheme = '' + self.userinfo = '' + self.hostname = '' + self.port = None + self._path = '' + self.params = {} + self.query = {} + self.relative = False + + if not uri: + return + + # We hijack the URL parameters, since the way bitbake uses + # them are not quite RFC compliant. + uri, param_str = (uri.split(";", 1) + [None])[:2] + + urlp = urlparse.urlparse(uri) + self.scheme = urlp.scheme + + reparse = 0 + + # Coerce urlparse to make URI scheme use netloc + if not self.scheme in urlparse.uses_netloc: + urlparse.uses_params.append(self.scheme) + reparse = 1 + + # Make urlparse happy(/ier) by converting local resources + # to RFC compliant URL format. 
E.g.: + # file://foo.diff -> file:foo.diff + if urlp.scheme in self._netloc_forbidden: + uri = re.sub("(?<=:)//(?!/)", "", uri, 1) + reparse = 1 + + if reparse: + urlp = urlparse.urlparse(uri) + + # Identify if the URI is relative or not + if urlp.scheme in self._relative_schemes and \ + re.compile("^\w+:(?!//)").match(uri): + self.relative = True + + if not self.relative: + self.hostname = urlp.hostname or '' + self.port = urlp.port + + self.userinfo += urlp.username or '' + + if urlp.password: + self.userinfo += ':%s' % urlp.password + + self.path = urllib.unquote(urlp.path) + + if param_str: + self.params = self._param_str_split(param_str, ";") + if urlp.query: + self.query = self._param_str_split(urlp.query, "&") + + def __str__(self): + userinfo = self.userinfo + if userinfo: + userinfo += '@' + + return "%s:%s%s%s%s%s%s" % ( + self.scheme, + '' if self.relative else '//', + userinfo, + self.hostport, + self.path_quoted, + self._query_str(), + self._param_str()) + + def _param_str(self): + return ( + ''.join([';', self._param_str_join(self.params, ";")]) + if self.params else '') + + def _query_str(self): + return ( + ''.join(['?', self._param_str_join(self.query, "&")]) + if self.query else '') + + def _param_str_split(self, string, elmdelim, kvdelim="="): + ret = {} + for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]: + ret[k] = v + return ret + + def _param_str_join(self, dict_, elmdelim, kvdelim="="): + return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) + + @property + def hostport(self): + if not self.port: + return self.hostname + return "%s:%d" % (self.hostname, self.port) + + @property + def path_quoted(self): + return urllib.quote(self.path) + + @path_quoted.setter + def path_quoted(self, path): + self.path = urllib.unquote(path) + + @property + def path(self): + return self._path + + @path.setter + def path(self, path): + self._path = path + + if re.compile("^/").match(path): + self.relative = False + else: + self.relative = True + + @property + def username(self): + if self.userinfo: + return (self.userinfo.split(":", 1))[0] + return '' + + @username.setter + def username(self, username): + password = self.password + self.userinfo = username + if password: + self.userinfo += ":%s" % password + + @property + def password(self): + if self.userinfo and ":" in self.userinfo: + return (self.userinfo.split(":", 1))[1] + return '' + + @password.setter + def password(self, password): + self.userinfo = "%s:%s" % (self.username, password) + +def decodeurl(url): + """Decodes an URL into the tokens (scheme, network location, path, + user, password, parameters). + """ + + m = re.compile('(?P[^:]*)://((?P[^/]+)@)?(?P[^;]+)(;(?P.*))?').match(url) + if not m: + raise MalformedUrl(url) + + type = m.group('type') + location = m.group('location') + if not location: + raise MalformedUrl(url) + user = m.group('user') + parm = m.group('parm') + + locidx = location.find('/') + if locidx != -1 and type.lower() != 'file': + host = location[:locidx] + path = location[locidx:] + else: + host = "" + path = location + if user: + m = re.compile('(?P[^:]+)(:?(?P.*))').match(user) + if m: + user = m.group('user') + pswd = m.group('pswd') + else: + user = '' + pswd = '' + + p = {} + if parm: + for s in parm.split(';'): + s1, s2 = s.split('=') + p[s1] = s2 + + return type, host, urllib.unquote(path), user, pswd, p + +def encodeurl(decoded): + """Encodes a URL from tokens (scheme, network location, path, + user, password, parameters). 
+ """ + + type, host, path, user, pswd, p = decoded + + if not path: + raise MissingParameterError('path', "encoded from the data %s" % str(decoded)) + if not type: + raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) + url = '%s://' % type + if user and type != "file": + url += "%s" % user + if pswd: + url += ":%s" % pswd + url += "@" + if host and type != "file": + url += "%s" % host + # Standardise path to ensure comparisons work + while '//' in path: + path = path.replace("//", "/") + url += "%s" % urllib.quote(path) + if p: + for parm in p: + url += ";%s=%s" % (parm, p[parm]) + + return url + +def uri_replace(ud, uri_find, uri_replace, replacements, d): + if not ud.url or not uri_find or not uri_replace: + logger.error("uri_replace: passed an undefined value, not replacing") + return None + uri_decoded = list(decodeurl(ud.url)) + uri_find_decoded = list(decodeurl(uri_find)) + uri_replace_decoded = list(decodeurl(uri_replace)) + logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) + result_decoded = ['', '', '', '', '', {}] + for loc, i in enumerate(uri_find_decoded): + result_decoded[loc] = uri_decoded[loc] + regexp = i + if loc == 0 and regexp and not regexp.endswith("$"): + # Leaving the type unanchored can mean "https" matching "file" can become "files" + # which is clearly undesirable. + regexp += "$" + if loc == 5: + # Handle URL parameters + if i: + # Any specified URL parameters must match + for k in uri_replace_decoded[loc]: + if uri_decoded[loc][k] != uri_replace_decoded[loc][k]: + return None + # Overwrite any specified replacement parameters + for k in uri_replace_decoded[loc]: + for l in replacements: + uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) + result_decoded[loc][k] = uri_replace_decoded[loc][k] + elif (re.match(regexp, uri_decoded[loc])): + if not uri_replace_decoded[loc]: + result_decoded[loc] = "" + else: + for k in replacements: + uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) + #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) + result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc]) + if loc == 2: + # Handle path manipulations + basename = None + if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball: + # If the source and destination url types differ, must be a mirrortarball mapping + basename = os.path.basename(ud.mirrortarball) + # Kill parameters, they make no sense for mirror tarballs + uri_decoded[5] = {} + elif ud.localpath and ud.method.supports_checksum(ud): + basename = os.path.basename(ud.localpath) + if basename and not result_decoded[loc].endswith(basename): + result_decoded[loc] = os.path.join(result_decoded[loc], basename) + else: + return None + result = encodeurl(result_decoded) + if result == ud.url: + return None + logger.debug(2, "For url %s returning %s" % (ud.url, result)) + return result + +methods = [] +urldata_cache = {} +saved_headrevs = {} + +def fetcher_init(d): + """ + Called to initialize the fetchers once the configuration data is known. + Calls before this must not hit the cache. 
+ """ + # When to drop SCM head revisions controlled by user policy + srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" + if srcrev_policy == "cache": + logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) + elif srcrev_policy == "clear": + logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) + try: + bb.fetch2.saved_headrevs = revs.items() + except: + pass + revs.clear() + else: + raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) + + _checksum_cache.init_cache(d) + + for m in methods: + if hasattr(m, "init"): + m.init(d) + +def fetcher_parse_save(d): + _checksum_cache.save_extras(d) + +def fetcher_parse_done(d): + _checksum_cache.save_merge(d) + +def fetcher_compare_revisions(d): + """ + Compare the revisions in the persistant cache with current values and + return true/false on whether they've changed. + """ + + data = bb.persist_data.persist('BB_URI_HEADREVS', d).items() + data2 = bb.fetch2.saved_headrevs + + changed = False + for key in data: + if key not in data2 or data2[key] != data[key]: + logger.debug(1, "%s changed", key) + changed = True + return True + else: + logger.debug(2, "%s did not change", key) + return False + +def mirror_from_string(data): + return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] + +def verify_checksum(ud, d): + """ + verify the MD5 and SHA256 checksum for downloaded src + + Raises a FetchError if one or both of the SRC_URI checksums do not match + the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no + checksums specified. + + """ + + if not ud.method.supports_checksum(ud): + return + + md5data = bb.utils.md5_file(ud.localpath) + sha256data = bb.utils.sha256_file(ud.localpath) + + if ud.method.recommends_checksum(ud): + # If strict checking enabled and neither sum defined, raise error + strict = d.getVar("BB_STRICT_CHECKSUM", True) or None + if strict and not (ud.md5_expected or ud.sha256_expected): + logger.error('No checksum specified for %s, please add at least one to the recipe:\n' + 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' % + (ud.localpath, ud.md5_name, md5data, + ud.sha256_name, sha256data)) + raise NoChecksumError('Missing SRC_URI checksum', ud.url) + + # Log missing sums so user can more easily add them + if not ud.md5_expected: + logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"', + ud.localpath, ud.md5_name, md5data) + + if not ud.sha256_expected: + logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n' + 'SRC_URI[%s] = "%s"', + ud.localpath, ud.sha256_name, sha256data) + + md5mismatch = False + sha256mismatch = False + + if ud.md5_expected != md5data: + md5mismatch = True + + if ud.sha256_expected != sha256data: + sha256mismatch = True + + # We want to alert the user if a checksum is defined in the recipe but + # it does not match. + msg = "" + mismatch = False + if md5mismatch and ud.md5_expected: + msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected) + mismatch = True; + + if sha256mismatch and ud.sha256_expected: + msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected) + mismatch = True; + + if mismatch: + msg = msg + '\nIf this change is expected (e.g. 
you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data) + + if len(msg): + raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data) + + +def update_stamp(ud, d): + """ + donestamp is file stamp indicating the whole fetching is done + this function update the stamp after verifying the checksum + """ + if os.path.exists(ud.donestamp): + # Touch the done stamp file to show active use of the download + try: + os.utime(ud.donestamp, None) + except: + # Errors aren't fatal here + pass + else: + verify_checksum(ud, d) + open(ud.donestamp, 'w').close() + +def subprocess_setup(): + # Python installs a SIGPIPE handler by default. This is usually not what + # non-Python subprocesses expect. + # SIGPIPE errors are known issues with gzip/bash + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + +def get_autorev(d): + # only not cache src rev in autorev case + if d.getVar('BB_SRCREV_POLICY', True) != "cache": + d.setVar('__BB_DONT_CACHE', '1') + return "AUTOINC" + +def get_srcrev(d): + """ + Return the version string for the current package + (usually to be used as PV) + Most packages usually only have one SCM so we just pass on the call. + In the multi SCM case, we build a value based on SRCREV_FORMAT which must + have been set. + """ + + scms = [] + fetcher = Fetch(d.getVar('SRC_URI', True).split(), d) + urldata = fetcher.ud + for u in urldata: + if urldata[u].method.supports_srcrev(): + scms.append(u) + + if len(scms) == 0: + raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") + + if len(scms) == 1 and len(urldata[scms[0]].names) == 1: + autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0]) + if len(rev) > 10: + rev = rev[:10] + if autoinc: + return "AUTOINC+" + rev + return rev + + # + # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT + # + format = d.getVar('SRCREV_FORMAT', True) + if not format: + raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") + + seenautoinc = False + for scm in scms: + ud = urldata[scm] + for name in ud.names: + autoinc, rev = ud.method.sortable_revision(ud, d, name) + seenautoinc = seenautoinc or autoinc + if len(rev) > 10: + rev = rev[:10] + format = format.replace(name, rev) + if seenautoinc: + format = "AUTOINC+" + format + + return format + +def localpath(url, d): + fetcher = bb.fetch2.Fetch([url], d) + return fetcher.localpath(url) + +def runfetchcmd(cmd, d, quiet = False, cleanup = []): + """ + Run cmd returning the command output + Raise an error if interrupted or cmd fails + Optionally echo command output to stdout + Optionally remove the files/directories listed in cleanup upon failure + """ + + # Need to export PATH as binary could be in metadata paths + # rather than host provided + # Also include some other variables. + # FIXME: Should really include all export varaiables? 
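The loop that follows builds the command's environment explicitly: each whitelisted variable that is set in the datastore is prepended to the fetch command as a shell export, so values from the metadata reach the child process. A standalone sketch of the same wrapping, with a plain dict standing in for the datastore (all values illustrative):

    env = {'PATH': '/usr/bin:/bin', 'http_proxy': 'http://proxy.example:3128'}
    cmd = 'wget http://example.com/foo-1.0.tar.gz'
    for var in ['HOME', 'PATH', 'http_proxy']:
        val = env.get(var)        # the real code uses d.getVar(var, True)
        if val:
            cmd = 'export ' + var + '="%s"; %s' % (val, cmd)
    # cmd is now:
    # export http_proxy="http://proxy.example:3128"; export PATH="/usr/bin:/bin"; wget ...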
+ exportvars = ['HOME', 'PATH', + 'HTTP_PROXY', 'http_proxy', + 'HTTPS_PROXY', 'https_proxy', + 'FTP_PROXY', 'ftp_proxy', + 'FTPS_PROXY', 'ftps_proxy', + 'NO_PROXY', 'no_proxy', + 'ALL_PROXY', 'all_proxy', + 'GIT_PROXY_COMMAND', + 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', + 'SOCKS5_USER', 'SOCKS5_PASSWD'] + + for var in exportvars: + val = d.getVar(var, True) + if val: + cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) + + logger.debug(1, "Running %s", cmd) + + success = False + error_message = "" + + try: + (output, errors) = bb.process.run(cmd, shell=True, stderr=subprocess.PIPE) + success = True + except bb.process.NotFoundError as e: + error_message = "Fetch command %s" % (e.command) + except bb.process.ExecutionError as e: + if e.stdout: + output = "output:\n%s\n%s" % (e.stdout, e.stderr) + elif e.stderr: + output = "output:\n%s" % e.stderr + else: + output = "no output" + error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output) + except bb.process.CmdError as e: + error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) + if not success: + for f in cleanup: + try: + bb.utils.remove(f, True) + except OSError: + pass + + raise FetchError(error_message) + + return output + +def check_network_access(d, info = "", url = None): + """ + log remote network access, and error if BB_NO_NETWORK is set + """ + if d.getVar("BB_NO_NETWORK", True) == "1": + raise NetworkAccess(url, info) + else: + logger.debug(1, "Fetcher accessed the network with the command %s" % info) + +def build_mirroruris(origud, mirrors, ld): + uris = [] + uds = [] + + replacements = {} + replacements["TYPE"] = origud.type + replacements["HOST"] = origud.host + replacements["PATH"] = origud.path + replacements["BASENAME"] = origud.path.split("/")[-1] + replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.') + + def adduri(ud, uris, uds): + for line in mirrors: + try: + (find, replace) = line + except ValueError: + continue + newuri = uri_replace(ud, find, replace, replacements, ld) + if not newuri or newuri in uris or newuri == origud.url: + continue + try: + newud = FetchData(newuri, ld) + newud.setup_localpath(ld) + except bb.fetch2.BBFetchException as e: + logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) + logger.debug(1, str(e)) + try: + ud.method.clean(ud, ld) + except UnboundLocalError: + pass + continue + uris.append(newuri) + uds.append(newud) + + adduri(newud, uris, uds) + + adduri(origud, uris, uds) + + return uris, uds + +def rename_bad_checksum(ud, suffix): + """ + Renames files to have suffix from parameter + """ + + if ud.localpath is None: + return + + new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix) + bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath)) + bb.utils.movefile(ud.localpath, new_localpath) + + +def try_mirror_url(origud, ud, ld, check = False): + # Return of None or a value means we're finished + # False means try another url + try: + if check: + found = ud.method.checkstatus(ud, ld) + if found: + return found + return False + + os.chdir(ld.getVar("DL_DIR", True)) + + if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld): + ud.method.download(ud, ld) + if hasattr(ud.method,"build_mirror_data"): + ud.method.build_mirror_data(ud, ld) + + if not ud.localpath or not os.path.exists(ud.localpath): + return False + + if ud.localpath == origud.localpath: + return ud.localpath + + # We may be obtaining a mirror tarball which needs 
further processing by the real fetcher + # If that tarball is a local file:// we need to provide a symlink to it + dldir = ld.getVar("DL_DIR", True) + if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \ + and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): + bb.utils.mkdirhier(os.path.dirname(ud.donestamp)) + open(ud.donestamp, 'w').close() + dest = os.path.join(dldir, os.path.basename(ud.localpath)) + if not os.path.exists(dest): + os.symlink(ud.localpath, dest) + if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld): + origud.method.download(origud, ld) + if hasattr(origud.method,"build_mirror_data"): + origud.method.build_mirror_data(origud, ld) + return ud.localpath + # Otherwise the result is a local file:// and we symlink to it + if not os.path.exists(origud.localpath): + if os.path.islink(origud.localpath): + # Broken symbolic link + os.unlink(origud.localpath) + + os.symlink(ud.localpath, origud.localpath) + update_stamp(origud, ld) + return ud.localpath + + except bb.fetch2.NetworkAccess: + raise + + except bb.fetch2.BBFetchException as e: + if isinstance(e, ChecksumError): + logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url)) + logger.warn(str(e)) + rename_bad_checksum(ud, e.checksum) + elif isinstance(e, NoChecksumError): + raise + else: + logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) + logger.debug(1, str(e)) + try: + ud.method.clean(ud, ld) + except UnboundLocalError: + pass + return False + +def try_mirrors(d, origud, mirrors, check = False): + """ + Try to use a mirrored version of the sources. + This method will be automatically called before the fetchers go. + + d Is a bb.data instance + uri is the original uri we're trying to download + mirrors is the list of mirrors we're going to try + """ + ld = d.createCopy() + + uris, uds = build_mirroruris(origud, mirrors, ld) + + for index, uri in enumerate(uris): + ret = try_mirror_url(origud, uds[index], ld, check) + if ret != False: + return ret + return None + +def srcrev_internal_helper(ud, d, name): + """ + Return: + a) a source revision if specified + b) latest revision if SRCREV="AUTOINC" + c) None if not specified + """ + + srcrev = None + pn = d.getVar("PN", True) + attempts = [] + if name != '' and pn: + attempts.append("SRCREV_%s_pn-%s" % (name, pn)) + if name != '': + attempts.append("SRCREV_%s" % name) + if pn: + attempts.append("SRCREV_pn-%s" % pn) + attempts.append("SRCREV") + + for a in attempts: + srcrev = d.getVar(a, True) + if srcrev and srcrev != "INVALID": + break + + if 'rev' in ud.parm and 'tag' in ud.parm: + raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." 
% (ud.url)) + + if 'rev' in ud.parm or 'tag' in ud.parm: + if 'rev' in ud.parm: + parmrev = ud.parm['rev'] + else: + parmrev = ud.parm['tag'] + if srcrev == "INVALID" or not srcrev: + return parmrev + if srcrev != parmrev: + raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please spcify one valid value" % (srcrev, parmrev)) + return parmrev + + if srcrev == "INVALID" or not srcrev: + raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) + if srcrev == "AUTOINC": + srcrev = ud.method.latest_revision(ud, d, name) + + return srcrev + +def get_checksum_file_list(d): + """ Get a list of files checksum in SRC_URI + + Returns the resolved local paths of all local file entries in + SRC_URI as a space-separated string + """ + fetch = Fetch([], d, cache = False, localonly = True) + + dl_dir = d.getVar('DL_DIR', True) + filelist = [] + for u in fetch.urls: + ud = fetch.ud[u] + + if ud and isinstance(ud.method, local.Local): + ud.setup_localpath(d) + f = ud.localpath + pth = ud.decodedurl + if '*' in pth: + f = os.path.join(os.path.abspath(f), pth) + if f.startswith(dl_dir): + # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else + if os.path.exists(f): + bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) + else: + bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) + filelist.append(f) + + return " ".join(filelist) + + +def get_file_checksums(filelist, pn): + """Get a list of the checksums for a list of local files + + Returns the checksums for a list of local files, caching the results as + it proceeds + + """ + + def checksum_file(f): + try: + checksum = _checksum_cache.get_checksum(f) + except OSError as e: + bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e)) + return None + return checksum + + def checksum_dir(pth): + # Handle directories recursively + dirchecksums = [] + for root, dirs, files in os.walk(pth): + for name in files: + fullpth = os.path.join(root, name) + checksum = checksum_file(fullpth) + if checksum: + dirchecksums.append((fullpth, checksum)) + return dirchecksums + + checksums = [] + for pth in filelist.split(): + checksum = None + if '*' in pth: + # Handle globs + for f in glob.glob(pth): + if os.path.isdir(f): + checksums.extend(checksum_dir(f)) + else: + checksum = checksum_file(f) + if checksum: + checksums.append((f, checksum)) + continue + elif os.path.isdir(pth): + checksums.extend(checksum_dir(pth)) + continue + else: + checksum = checksum_file(pth) + + if checksum: + checksums.append((pth, checksum)) + + checksums.sort(key=operator.itemgetter(1)) + return checksums + + +class FetchData(object): + """ + A class which represents the fetcher state for a given URI. + """ + def __init__(self, url, d, localonly = False): + # localpath is the location of a downloaded result. If not set, the file is local. 
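Most of the attributes initialised below come from decodeurl(), defined earlier in this file, which splits a single SRC_URI entry into a six-element tuple. A quick illustrative call (the URL is made up):

    decodeurl("http://example.com/releases/foo-1.0.tar.gz;name=foo")
    # -> ('http', 'example.com', '/releases/foo-1.0.tar.gz', '', '',
    #     {'name': 'foo'})

encodeurl() is the inverse, so the pair round-trips a URL apart from the ordering of parameters.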
+ self.donestamp = None + self.localfile = "" + self.localpath = None + self.lockfile = None + self.mirrortarball = None + self.basename = None + self.basepath = None + (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) + self.date = self.getSRCDate(d) + self.url = url + if not self.user and "user" in self.parm: + self.user = self.parm["user"] + if not self.pswd and "pswd" in self.parm: + self.pswd = self.parm["pswd"] + self.setup = False + + if "name" in self.parm: + self.md5_name = "%s.md5sum" % self.parm["name"] + self.sha256_name = "%s.sha256sum" % self.parm["name"] + else: + self.md5_name = "md5sum" + self.sha256_name = "sha256sum" + if self.md5_name in self.parm: + self.md5_expected = self.parm[self.md5_name] + elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + self.md5_expected = None + else: + self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) + if self.sha256_name in self.parm: + self.sha256_expected = self.parm[self.sha256_name] + elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + self.sha256_expected = None + else: + self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) + + self.names = self.parm.get("name",'default').split(',') + + self.method = None + for m in methods: + if m.supports(self, d): + self.method = m + break + + if not self.method: + raise NoMethodError(url) + + if localonly and not isinstance(self.method, local.Local): + raise NonLocalMethod() + + if self.parm.get("proto", None) and "protocol" not in self.parm: + logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True)) + self.parm["protocol"] = self.parm.get("proto", None) + + if hasattr(self.method, "urldata_init"): + self.method.urldata_init(self, d) + + if "localpath" in self.parm: + # if user sets localpath for file, use it instead. + self.localpath = self.parm["localpath"] + self.basename = os.path.basename(self.localpath) + elif self.localfile: + self.localpath = self.method.localpath(self, d) + + dldir = d.getVar("DL_DIR", True) + # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. + if self.localpath and self.localpath.startswith(dldir): + basepath = self.localpath + elif self.localpath: + basepath = dldir + os.sep + os.path.basename(self.localpath) + else: + basepath = dldir + os.sep + (self.basepath or self.basename) + self.donestamp = basepath + '.done' + self.lockfile = basepath + '.lock' + + def setup_revisons(self, d): + self.revisions = {} + for name in self.names: + self.revisions[name] = srcrev_internal_helper(self, d, name) + + # add compatibility code for non name specified case + if len(self.names) == 1: + self.revision = self.revisions[self.names[0]] + + def setup_localpath(self, d): + if not self.localpath: + self.localpath = self.method.localpath(self, d) + + def getSRCDate(self, d): + """ + Return the SRC Date for the component + + d the bb.data module + """ + if "srcdate" in self.parm: + return self.parm['srcdate'] + + pn = d.getVar("PN", True) + + if pn: + return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True) + + return d.getVar("SRCDATE", True) or d.getVar("DATE", True) + +class FetchMethod(object): + """Base class for 'fetch'ing data""" + + def __init__(self, urls = []): + self.urls = [] + + def supports(self, urldata, d): + """ + Check to see if this fetch class supports a given url. 
+ """ + return 0 + + def localpath(self, urldata, d): + """ + Return the local filename of a given url assuming a successful fetch. + Can also setup variables in urldata for use in go (saving code duplication + and duplicate code execution) + """ + return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile) + + def supports_checksum(self, urldata): + """ + Is localpath something that can be represented by a checksum? + """ + + # We cannot compute checksums for directories + if os.path.isdir(urldata.localpath) == True: + return False + if urldata.localpath.find("*") != -1: + return False + + return True + + def recommends_checksum(self, urldata): + """ + Is the backend on where checksumming is recommended (should warnings + be displayed if there is no checksum)? + """ + return False + + def _strip_leading_slashes(self, relpath): + """ + Remove leading slash as os.path.join can't cope + """ + while os.path.isabs(relpath): + relpath = relpath[1:] + return relpath + + def setUrls(self, urls): + self.__urls = urls + + def getUrls(self): + return self.__urls + + urls = property(getUrls, setUrls, None, "Urls property") + + def need_update(self, ud, d): + """ + Force a fetch, even if localpath exists? + """ + if os.path.exists(ud.localpath): + return False + return True + + def supports_srcrev(self): + """ + The fetcher supports auto source revisions (SRCREV) + """ + return False + + def download(self, urldata, d): + """ + Fetch urls + Assumes localpath was called first + """ + raise NoMethodError(url) + + def unpack(self, urldata, rootdir, data): + iterate = False + file = urldata.localpath + + try: + unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True) + except ValueError as exc: + bb.fatal("Invalid value for 'unpack' parameter for %s: %s" % + (file, urldata.parm.get('unpack'))) + + dots = file.split(".") + if dots[-1] in ['gz', 'bz2', 'Z', 'xz']: + efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1]))) + else: + efile = file + cmd = None + + if unpack: + if file.endswith('.tar'): + cmd = 'tar x --no-same-owner -f %s' % file + elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): + cmd = 'tar xz --no-same-owner -f %s' % file + elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): + cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file + elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): + cmd = 'gzip -dc %s > %s' % (file, efile) + elif file.endswith('.bz2'): + cmd = 'bzip2 -dc %s > %s' % (file, efile) + elif file.endswith('.tar.xz'): + cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file + elif file.endswith('.xz'): + cmd = 'xz -dc %s > %s' % (file, efile) + elif file.endswith('.zip') or file.endswith('.jar'): + try: + dos = bb.utils.to_boolean(urldata.parm.get('dos'), False) + except ValueError as exc: + bb.fatal("Invalid value for 'dos' parameter for %s: %s" % + (file, urldata.parm.get('dos'))) + cmd = 'unzip -q -o' + if dos: + cmd = '%s -a' % cmd + cmd = "%s '%s'" % (cmd, file) + elif file.endswith('.rpm') or file.endswith('.srpm'): + if 'extract' in urldata.parm: + unpack_file = urldata.parm.get('extract') + cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file) + iterate = True + iterate_file = unpack_file + else: + cmd = 'rpm2cpio.sh %s | cpio -id' % (file) + elif file.endswith('.deb') or file.endswith('.ipk'): + cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file + + if not unpack or not cmd: + # If file == dest, then avoid any copies, as we 
already put the file into dest! + dest = os.path.join(rootdir, os.path.basename(file)) + if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)): + if os.path.isdir(file): + # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar + basepath = getattr(urldata, "basepath", None) + destdir = "." + if basepath and basepath.endswith("/"): + basepath = basepath.rstrip("/") + elif basepath: + basepath = os.path.dirname(basepath) + if basepath and basepath.find("/") != -1: + destdir = basepath[:basepath.rfind('/')] + destdir = destdir.strip('/') + if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK): + os.makedirs("%s/%s" % (rootdir, destdir)) + cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir) + #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir) + else: + # The "destdir" handling was specifically done for FILESPATH + # items. So, only do so for file:// entries. + if urldata.type == "file" and urldata.path.find("/") != -1: + destdir = urldata.path.rsplit("/", 1)[0] + else: + destdir = "." + bb.utils.mkdirhier("%s/%s" % (rootdir, destdir)) + cmd = 'cp %s %s/%s/' % (file, rootdir, destdir) + + if not cmd: + return + + # Change to subdir before executing command + save_cwd = os.getcwd(); + os.chdir(rootdir) + if 'subdir' in urldata.parm: + newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir'))) + bb.utils.mkdirhier(newdir) + os.chdir(newdir) + + path = data.getVar('PATH', True) + if path: + cmd = "PATH=\"%s\" %s" % (path, cmd) + bb.note("Unpacking %s to %s/" % (file, os.getcwd())) + ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True) + + os.chdir(save_cwd) + + if ret != 0: + raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url) + + if iterate is True: + iterate_urldata = urldata + iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file) + self.unpack(urldata, rootdir, data) + + return + + def clean(self, urldata, d): + """ + Clean any existing full or partial download + """ + bb.utils.remove(urldata.localpath) + + def try_premirror(self, urldata, d): + """ + Should premirrors be used? + """ + return True + + def checkstatus(self, urldata, d): + """ + Check the status of a URL + Assumes localpath was called first + """ + logger.info("URL %s could not be checked for status since no method exists.", url) + return True + + def latest_revision(self, ud, d, name): + """ + Look in the cache for the latest revision, if not present ask the SCM. 
+ """ + if not hasattr(self, "_latest_revision"): + raise ParameterError("The fetcher for this URL does not support _latest_revision", url) + + revs = bb.persist_data.persist('BB_URI_HEADREVS', d) + key = self.generate_revision_key(ud, d, name) + try: + return revs[key] + except KeyError: + revs[key] = rev = self._latest_revision(ud, d, name) + return rev + + def sortable_revision(self, ud, d, name): + latest_rev = self._build_revision(ud, d, name) + return True, str(latest_rev) + + def generate_revision_key(self, ud, d, name): + key = self._revision_key(ud, d, name) + return "%s-%s" % (key, d.getVar("PN", True) or "") + +class Fetch(object): + def __init__(self, urls, d, cache = True, localonly = False): + if localonly and cache: + raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") + + if len(urls) == 0: + urls = d.getVar("SRC_URI", True).split() + self.urls = urls + self.d = d + self.ud = {} + + fn = d.getVar('FILE', True) + if cache and fn and fn in urldata_cache: + self.ud = urldata_cache[fn] + + for url in urls: + if url not in self.ud: + try: + self.ud[url] = FetchData(url, d, localonly) + except NonLocalMethod: + if localonly: + self.ud[url] = None + pass + + if fn and cache: + urldata_cache[fn] = self.ud + + def localpath(self, url): + if url not in self.urls: + self.ud[url] = FetchData(url, self.d) + + self.ud[url].setup_localpath(self.d) + return self.d.expand(self.ud[url].localpath) + + def localpaths(self): + """ + Return a list of the local filenames, assuming successful fetch + """ + local = [] + + for u in self.urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + local.append(ud.localpath) + + return local + + def download(self, urls = []): + """ + Fetch all urls + """ + if len(urls) == 0: + urls = self.urls + + network = self.d.getVar("BB_NO_NETWORK", True) + premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + m = ud.method + localpath = "" + + lf = bb.utils.lockfile(ud.lockfile) + + try: + self.d.setVar("BB_NO_NETWORK", network) + + if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d): + localpath = ud.localpath + elif m.try_premirror(ud, self.d): + logger.debug(1, "Trying PREMIRRORS") + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + localpath = try_mirrors(self.d, ud, mirrors, False) + + if premirroronly: + self.d.setVar("BB_NO_NETWORK", "1") + + os.chdir(self.d.getVar("DL_DIR", True)) + + firsterr = None + if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)): + try: + logger.debug(1, "Trying Upstream") + m.download(ud, self.d) + if hasattr(m, "build_mirror_data"): + m.build_mirror_data(ud, self.d) + localpath = ud.localpath + # early checksum verify, so that if checksum mismatched, + # fetcher still have chance to fetch from mirror + update_stamp(ud, self.d) + + except bb.fetch2.NetworkAccess: + raise + + except BBFetchException as e: + if isinstance(e, ChecksumError): + logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u) + logger.debug(1, str(e)) + rename_bad_checksum(ud, e.checksum) + elif isinstance(e, NoChecksumError): + raise + else: + logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u) + logger.debug(1, str(e)) + firsterr = e + # Remove any incomplete fetch + m.clean(ud, self.d) + logger.debug(1, "Trying MIRRORS") + mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + localpath = 
try_mirrors (self.d, ud, mirrors) + + if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): + if firsterr: + logger.error(str(firsterr)) + raise FetchError("Unable to fetch URL from any source.", u) + + update_stamp(ud, self.d) + + except BBFetchException as e: + if isinstance(e, ChecksumError): + logger.error("Checksum failure fetching %s" % u) + raise + + finally: + bb.utils.unlockfile(lf) + + def checkstatus(self, urls = []): + """ + Check all urls exist upstream + """ + + if len(urls) == 0: + urls = self.urls + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + m = ud.method + logger.debug(1, "Testing URL %s", u) + # First try checking uri, u, from PREMIRRORS + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + ret = try_mirrors(self.d, ud, mirrors, True) + if not ret: + # Next try checking from the original uri, u + try: + ret = m.checkstatus(ud, self.d) + except: + # Finally, try checking uri, u, from MIRRORS + mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + ret = try_mirrors(self.d, ud, mirrors, True) + + if not ret: + raise FetchError("URL %s doesn't work" % u, u) + + def unpack(self, root, urls = []): + """ + Check all urls exist upstream + """ + + if len(urls) == 0: + urls = self.urls + + for u in urls: + ud = self.ud[u] + ud.setup_localpath(self.d) + + if self.d.expand(self.localpath) is None: + continue + + if ud.lockfile: + lf = bb.utils.lockfile(ud.lockfile) + + ud.method.unpack(ud, root, self.d) + + if ud.lockfile: + bb.utils.unlockfile(lf) + + def clean(self, urls = []): + """ + Clean files that the fetcher gets or places + """ + + if len(urls) == 0: + urls = self.urls + + for url in urls: + if url not in self.ud: + self.ud[url] = FetchData(url, d) + ud = self.ud[url] + ud.setup_localpath(self.d) + + if not ud.localfile and ud.localpath is None: + continue + + if ud.lockfile: + lf = bb.utils.lockfile(ud.lockfile) + + ud.method.clean(ud, self.d) + if ud.donestamp: + bb.utils.remove(ud.donestamp) + + if ud.lockfile: + bb.utils.unlockfile(lf) + +from . import cvs +from . import git +from . import gitsm +from . import gitannex +from . import local +from . import svn +from . import wget +from . import ssh +from . import sftp +from . import perforce +from . import bzr +from . import hg +from . import osc +from . import repo + +methods.append(local.Local()) +methods.append(wget.Wget()) +methods.append(svn.Svn()) +methods.append(git.Git()) +methods.append(gitsm.GitSM()) +methods.append(gitannex.GitANNEX()) +methods.append(cvs.Cvs()) +methods.append(ssh.SSH()) +methods.append(sftp.SFTP()) +methods.append(perforce.Perforce()) +methods.append(bzr.Bzr()) +methods.append(hg.Hg()) +methods.append(osc.Osc()) +methods.append(repo.Repo()) diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py new file mode 100644 index 0000000000..03e9ac461b --- /dev/null +++ b/bitbake/lib/bb/fetch2/bzr.py @@ -0,0 +1,143 @@ +""" +BitBake 'Fetch' implementation for bzr. + +""" + +# Copyright (C) 2007 Ross Burton +# Copyright (C) 2007 Richard Purdie +# +# Classes for obtaining upstream sources for the +# BitBake build tools. +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Bzr(FetchMethod): + def supports(self, ud, d): + return ud.type in ['bzr'] + + def urldata_init(self, ud, d): + """ + init bzr specific variable within url data + """ + # Create paths to bzr checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) + + ud.setup_revisons(d) + + if not ud.revision: + ud.revision = self.latest_revision(ud, d) + + ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) + + def _buildbzrcommand(self, ud, d, command): + """ + Build up an bzr commandline based on ud + command is "fetch", "update", "revno" + """ + + basecmd = data.expand('${FETCHCMD_bzr}', d) + + proto = ud.parm.get('protocol', 'http') + + bzrroot = ud.host + ud.path + + options = [] + + if command == "revno": + bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + else: + if ud.revision: + options.append("-r %s" % ud.revision) + + if command == "fetch": + bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot) + elif command == "update": + bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options)) + else: + raise FetchError("Invalid bzr command %s" % command, ud.url) + + return bzrcmd + + def download(self, ud, d): + """Fetch url""" + + if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): + bzrcmd = self._buildbzrcommand(ud, d, "update") + logger.debug(1, "BZR Update %s", ud.url) + bb.fetch2.check_network_access(d, bzrcmd, ud.url) + os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) + runfetchcmd(bzrcmd, d) + else: + bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) + bzrcmd = self._buildbzrcommand(ud, d, "fetch") + bb.fetch2.check_network_access(d, bzrcmd, ud.url) + logger.debug(1, "BZR Checkout %s", ud.url) + bb.utils.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", bzrcmd) + runfetchcmd(bzrcmd, d) + + os.chdir(ud.pkgdir) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" + + # tar them up to a defined filename + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath]) + + def supports_srcrev(self): + return True + + def _revision_key(self, ud, d, name): + """ + Return a unique key for the url + """ + return "bzr:" + ud.pkgdir + + def _latest_revision(self, ud, d, name): + """ + Return the latest upstream revision number + """ + logger.debug(2, "BZR fetcher hitting network for %s", ud.url) + + bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) + + output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True) + + return output.strip() + + def sortable_revision(self, ud, 
d, name): + """ + Return a sortable revision number which in our case is the revision number + """ + + return False, self._build_revision(ud, d) + + def _build_revision(self, ud, d): + return ud.revision diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py new file mode 100644 index 0000000000..d27d96f68c --- /dev/null +++ b/bitbake/lib/bb/fetch2/cvs.py @@ -0,0 +1,171 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +#Based on functions from the base bb module, Copyright 2003 Holger Schurig +# + +import os +import logging +import bb +from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger +from bb.fetch2 import runfetchcmd + +class Cvs(FetchMethod): + """ + Class to fetch a module or modules from cvs repositories + """ + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with cvs. + """ + return ud.type in ['cvs'] + + def urldata_init(self, ud, d): + if not "module" in ud.parm: + raise MissingParameterError("module", ud.url) + ud.module = ud.parm["module"] + + ud.tag = ud.parm.get('tag', "") + + # Override the default date in certain cases + if 'date' in ud.parm: + ud.date = ud.parm['date'] + elif ud.tag: + ud.date = "" + + norecurse = '' + if 'norecurse' in ud.parm: + norecurse = '_norecurse' + + fullpath = '' + if 'fullpath' in ud.parm: + fullpath = '_fullpath' + + ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) + + def need_update(self, ud, d): + if (ud.date == "now"): + return True + if not os.path.exists(ud.localpath): + return True + return False + + def download(self, ud, d): + + method = ud.parm.get('method', 'pserver') + localdir = ud.parm.get('localdir', ud.module) + cvs_port = ud.parm.get('port', '') + + cvs_rsh = None + if method == "ext": + if "rsh" in ud.parm: + cvs_rsh = ud.parm["rsh"] + + if method == "dir": + cvsroot = ud.path + else: + cvsroot = ":" + method + cvsproxyhost = d.getVar('CVS_PROXY_HOST', True) + if cvsproxyhost: + cvsroot += ";proxy=" + cvsproxyhost + cvsproxyport = d.getVar('CVS_PROXY_PORT', True) + if cvsproxyport: + cvsroot += ";proxyport=" + cvsproxyport + cvsroot += ":" + ud.user + if ud.pswd: + cvsroot += ":" + ud.pswd + cvsroot += "@" + ud.host + ":" + cvs_port + ud.path + + options = [] + if 'norecurse' in ud.parm: + options.append("-l") + if ud.date: + # treat YYYYMMDDHHMM specially for CVS + if len(ud.date) == 12: + options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12])) + else: + options.append("-D \"%s UTC\"" % ud.date) + if ud.tag: + options.append("-r %s" % ud.tag) + + cvsbasecmd = d.getVar("FETCHCMD_cvs", True) + 
cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
+        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
+
+        if cvs_rsh:
+            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+        # create module directory
+        logger.debug(2, "Fetch: checking for module directory")
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        moddir = os.path.join(pkgdir, localdir)
+        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
+            logger.info("Update " + ud.url)
+            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
+            # update sources there
+            os.chdir(moddir)
+            cmd = cvsupdatecmd
+        else:
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(pkgdir)
+            os.chdir(pkgdir)
+            logger.debug(1, "Running %s", cvscmd)
+            bb.fetch2.check_network_access(d, cvscmd, ud.url)
+            cmd = cvscmd
+
+        runfetchcmd(cmd, d, cleanup = [moddir])
+
+        if not os.access(moddir, os.R_OK):
+            raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude 'CVS'"
+
+        # tar them up to a defined filename
+        if 'fullpath' in ud.parm:
+            os.chdir(pkgdir)
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
+        else:
+            os.chdir(moddir)
+            os.chdir('..')
+            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
+
+        runfetchcmd(cmd, d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean CVS Files and tarballs """
+
+        pkg = d.getVar('PN', True)
+        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+
+        bb.utils.remove(pkgdir, True)
+        bb.utils.remove(ud.localpath)
+
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 0000000000..9ca24428a1
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,355 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git implementation
+
+The git fetcher supports SRC_URIs of the form:
+SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
+
+Supported SRC_URI options are:
+
+- branch
+   The git branch to retrieve from. The default is "master".
+
+   This option also supports fetching multiple branches, with the branches
+   separated by commas. In the multiple-branch case, the name option must
+   carry the same number of names, matched one-to-one against the branches;
+   the names are used to specify the SRCREV for each branch, e.g.:
+   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
+   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
+   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
+
+- tag
+   The git tag to retrieve. The default is "master".
+
+- protocol
+   The method to use to access the repository. Common options are "git",
+   "http", "https", "file", "ssh" and "rsync". The default is "git".
+
+- rebaseable
+   rebaseable indicates that the upstream git repo may rebase in the future,
+   so the current revision may disappear from the upstream repo. This option
+   reminds the fetcher to preserve the local cache carefully for future use.
+   The default value is "0"; set rebaseable=1 for a rebaseable git repo.
+
+- nocheckout
+   Don't check out source code when unpacking. Set this option for recipes
+   that have their own routine to check out code.
+   The default is "0"; set nocheckout=1 if needed.
+
+- bareclone
+   Create a bare clone of the source code and don't check out the source
+   code when unpacking. Set this option for recipes that have their own
+   routine to check out code and their own tracking-branch requirements.
+   The default is "0"; set bareclone=1 if needed.
+
+- nobranch
+   Don't validate the SHA against the branch. Set this option for recipes
+   that refer to a commit which is valid in a tag rather than in a branch.
+   The default is "0"; set nobranch=1 if needed.
+
+"""
+
+# Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Git(FetchMethod):
+    """Class to fetch a module or modules from git repositories"""
+    def init(self, d):
+        pass
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with git.
+        """
+        return ud.type in ['git']
+
+    def supports_checksum(self, urldata):
+        return False
+
+    def urldata_init(self, ud, d):
+        """
+        init git specific variables within url data
+        so that git methods like latest_revision() can work
+        """
+        if 'protocol' in ud.parm:
+            ud.proto = ud.parm['protocol']
+        elif not ud.host:
+            ud.proto = 'file'
+        else:
+            ud.proto = "git"
+
+        if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
+            raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
+
+        ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
+
+        ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
+
+        ud.nobranch = ud.parm.get("nobranch","0") == "1"
+
+        # bareclone implies nocheckout
+        ud.bareclone = ud.parm.get("bareclone","0") == "1"
+        if ud.bareclone:
+            ud.nocheckout = 1
+
+        ud.unresolvedrev = {}
+        branches = ud.parm.get("branch", "master").split(',')
+        if len(branches) != len(ud.names):
+            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
+        ud.branches = {}
+        for name in ud.names:
+            branch = branches[ud.names.index(name)]
+            ud.branches[name] = branch
+            ud.unresolvedrev[name] = branch
+
+        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
+
+        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
+
+        ud.setup_revisons(d)
+
+        for name in ud.names:
+            # Ensure anything that doesn't look like a sha1 checksum/revision is translated into one
+            if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+                if ud.revisions[name]:
+                    ud.unresolvedrev[name] = ud.revisions[name]
+                ud.revisions[name] = self.latest_revision(ud, d, name)
+
+        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
+        # for a rebaseable git repo, it is necessary to keep a mirror tarball
+        # per revision, so that even if the revision disappears from the
+        # upstream repo in the future, the mirror will remain
intact and still + # contains the revision + if ud.rebaseable: + for name in ud.names: + gitsrcname = gitsrcname + '_' + ud.revisions[name] + ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname) + ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) + gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/") + ud.clonedir = os.path.join(gitdir, gitsrcname) + + ud.localfile = ud.clonedir + + def localpath(self, ud, d): + return ud.clonedir + + def need_update(self, ud, d): + if not os.path.exists(ud.clonedir): + return True + os.chdir(ud.clonedir) + for name in ud.names: + if not self._contains_ref(ud, d, name): + return True + if ud.write_tarballs and not os.path.exists(ud.fullmirror): + return True + return False + + def try_premirror(self, ud, d): + # If we don't do this, updating an existing checkout with only premirrors + # is not possible + if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: + return True + if os.path.exists(ud.clonedir): + return False + return True + + def download(self, ud, d): + """Fetch url""" + + if ud.user: + username = ud.user + '@' + else: + username = "" + + ud.repochanged = not os.path.exists(ud.fullmirror) + + # If the checkout doesn't exist and the mirror tarball does, extract it + if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror): + bb.utils.mkdirhier(ud.clonedir) + os.chdir(ud.clonedir) + runfetchcmd("tar -xzf %s" % (ud.fullmirror), d) + + repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) + + # If the repo still doesn't exist, fallback to cloning it + if not os.path.exists(ud.clonedir): + # We do this since git will use a "-l" option automatically for local urls where possible + if repourl.startswith("file://"): + repourl = repourl[7:] + clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir) + if ud.proto.lower() != 'file': + bb.fetch2.check_network_access(d, clone_cmd) + runfetchcmd(clone_cmd, d) + + os.chdir(ud.clonedir) + # Update the checkout if needed + needupdate = False + for name in ud.names: + if not self._contains_ref(ud, d, name): + needupdate = True + if needupdate: + try: + runfetchcmd("%s remote rm origin" % ud.basecmd, d) + except bb.fetch2.FetchError: + logger.debug(1, "No Origin") + + runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d) + fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl) + if ud.proto.lower() != 'file': + bb.fetch2.check_network_access(d, fetch_cmd, ud.url) + runfetchcmd(fetch_cmd, d) + runfetchcmd("%s prune-packed" % ud.basecmd, d) + runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) + ud.repochanged = True + os.chdir(ud.clonedir) + for name in ud.names: + if not self._contains_ref(ud, d, name): + raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name])) + + def build_mirror_data(self, ud, d): + # Generate a mirror tarball if needed + if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)): + # it's possible that this symlink points to read-only filesystem with PREMIRROR + if os.path.islink(ud.fullmirror): + os.unlink(ud.fullmirror) + + os.chdir(ud.clonedir) + logger.info("Creating tarball of git repository") + runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d) + runfetchcmd("touch %s.done" % (ud.fullmirror), d) + + def unpack(self, ud, destdir, d): + """ unpack the downloaded src to destdir""" + + subdir = ud.parm.get("subpath", "") + 
if subdir != "": + readpathspec = ":%s" % (subdir) + def_destsuffix = "%s/" % os.path.basename(subdir) + else: + readpathspec = "" + def_destsuffix = "git/" + + destsuffix = ud.parm.get("destsuffix", def_destsuffix) + destdir = ud.destdir = os.path.join(destdir, destsuffix) + if os.path.exists(destdir): + bb.utils.prunedir(destdir) + + cloneflags = "-s -n" + if ud.bareclone: + cloneflags += " --mirror" + + # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused + # and you end up with some horrible union of the two when you attempt to clone it + # The least invasive workaround seems to be a symlink to the real directory to + # fool git into ignoring any .git version that may also be present. + # + # The issue is fixed in more recent versions of git so we can drop this hack in future + # when that version becomes common enough. + clonedir = ud.clonedir + if not ud.path.endswith(".git"): + indirectiondir = destdir[:-1] + ".indirectionsymlink" + if os.path.exists(indirectiondir): + os.remove(indirectiondir) + bb.utils.mkdirhier(os.path.dirname(indirectiondir)) + os.symlink(ud.clonedir, indirectiondir) + clonedir = indirectiondir + + runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d) + if not ud.nocheckout: + os.chdir(destdir) + if subdir != "": + runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d) + runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d) + else: + runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d) + return True + + def clean(self, ud, d): + """ clean the git directory """ + + bb.utils.remove(ud.localpath, True) + bb.utils.remove(ud.fullmirror) + bb.utils.remove(ud.fullmirror + ".done") + + def supports_srcrev(self): + return True + + def _contains_ref(self, ud, d, name): + cmd = "" + if ud.nobranch: + cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( + ud.basecmd, ud.revisions[name]) + else: + cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( + ud.basecmd, ud.revisions[name], ud.branches[name]) + try: + output = runfetchcmd(cmd, d, quiet=True) + except bb.fetch2.FetchError: + return False + if len(output.split()) > 1: + raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) + return output.split()[0] != "0" + + def _revision_key(self, ud, d, name): + """ + Return a unique key for the url + """ + return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name] + + def _lsremote(self, ud, d, search): + """ + Run git ls-remote with the specified search string + """ + if ud.user: + username = ud.user + '@' + else: + username = "" + + cmd = "%s ls-remote %s://%s%s%s %s" % \ + (ud.basecmd, ud.proto, username, ud.host, ud.path, search) + if ud.proto.lower() != 'file': + bb.fetch2.check_network_access(d, cmd) + output = runfetchcmd(cmd, d, True) + if not output: + raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url) + return output + + def _latest_revision(self, ud, d, name): + """ + Compute the HEAD revision for the url + """ + search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name]) + output = self._lsremote(ud, d, search) + return output.split()[0] + + def _build_revision(self, ud, d, name): + return ud.revisions[name] + + def checkstatus(self, ud, d): + fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url) + try: + runfetchcmd(fetchcmd, d, quiet=True) + return True + except FetchError: 
+ return False diff --git a/bitbake/lib/bb/fetch2/gitannex.py b/bitbake/lib/bb/fetch2/gitannex.py new file mode 100644 index 0000000000..0f37897450 --- /dev/null +++ b/bitbake/lib/bb/fetch2/gitannex.py @@ -0,0 +1,76 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' git annex implementation +""" + +# Copyright (C) 2014 Otavio Salvador +# Copyright (C) 2014 O.S. Systems Software LTDA. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import bb +from bb import data +from bb.fetch2.git import Git +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class GitANNEX(Git): + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with git. + """ + return ud.type in ['gitannex'] + + def uses_annex(self, ud, d): + for name in ud.names: + try: + runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True) + return True + except bb.fetch.FetchError: + pass + + return False + + def update_annex(self, ud, d): + try: + runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True) + except bb.fetch.FetchError: + return False + runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True) + + return True + + def download(self, ud, d): + Git.download(self, ud, d) + + os.chdir(ud.clonedir) + annex = self.uses_annex(ud, d) + if annex: + self.update_annex(ud, d) + + def unpack(self, ud, destdir, d): + Git.unpack(self, ud, destdir, d) + + os.chdir(ud.destdir) + try: + runfetchcmd("%s annex sync" % (ud.basecmd), d) + except bb.fetch.FetchError: + pass + + annex = self.uses_annex(ud, d) + if annex: + runfetchcmd("%s annex get" % (ud.basecmd), d) + runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True) diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py new file mode 100644 index 0000000000..1a762153c4 --- /dev/null +++ b/bitbake/lib/bb/fetch2/gitsm.py @@ -0,0 +1,126 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' git submodules implementation +""" + +# Copyright (C) 2013 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +import os +import bb +from bb import data +from bb.fetch2.git import Git +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class GitSM(Git): + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with git. + """ + return ud.type in ['gitsm'] + + def uses_submodules(self, ud, d): + for name in ud.names: + try: + runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True) + return True + except bb.fetch.FetchError: + pass + return False + + def _set_relative_paths(self, repopath): + """ + Fix submodule paths to be relative instead of absolute, + so that when we move the repo it doesn't break + (In Git 1.7.10+ this is done automatically) + """ + submodules = [] + with open(os.path.join(repopath, '.gitmodules'), 'r') as f: + for line in f.readlines(): + if line.startswith('[submodule'): + submodules.append(line.split('"')[1]) + + for module in submodules: + repo_conf = os.path.join(repopath, module, '.git') + if os.path.exists(repo_conf): + with open(repo_conf, 'r') as f: + lines = f.readlines() + newpath = '' + for i, line in enumerate(lines): + if line.startswith('gitdir:'): + oldpath = line.split(': ')[-1].rstrip() + if oldpath.startswith('/'): + newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module + lines[i] = 'gitdir: %s\n' % newpath + break + if newpath: + with open(repo_conf, 'w') as f: + for line in lines: + f.write(line) + + repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config') + if os.path.exists(repo_conf2): + with open(repo_conf2, 'r') as f: + lines = f.readlines() + newpath = '' + for i, line in enumerate(lines): + if line.lstrip().startswith('worktree = '): + oldpath = line.split(' = ')[-1].rstrip() + if oldpath.startswith('/'): + newpath = '../' * (module.count('/') + 3) + module + lines[i] = '\tworktree = %s\n' % newpath + break + if newpath: + with open(repo_conf2, 'w') as f: + for line in lines: + f.write(line) + + def update_submodules(self, ud, d): + # We have to convert bare -> full repo, do the submodule bit, then convert back + tmpclonedir = ud.clonedir + ".tmp" + gitdir = tmpclonedir + os.sep + ".git" + bb.utils.remove(tmpclonedir, True) + os.mkdir(tmpclonedir) + os.rename(ud.clonedir, gitdir) + runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d) + os.chdir(tmpclonedir) + runfetchcmd(ud.basecmd + " reset --hard", d) + runfetchcmd(ud.basecmd + " submodule init", d) + runfetchcmd(ud.basecmd + " submodule update", d) + self._set_relative_paths(tmpclonedir) + runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d) + os.rename(gitdir, ud.clonedir,) + bb.utils.remove(tmpclonedir, True) + + def download(self, ud, d): + Git.download(self, ud, d) + + os.chdir(ud.clonedir) + submodules = self.uses_submodules(ud, d) + if submodules: + self.update_submodules(ud, d) + + def unpack(self, ud, destdir, d): + Git.unpack(self, ud, destdir, d) + + os.chdir(ud.destdir) + submodules = self.uses_submodules(ud, d) + if submodules: + runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d) + runfetchcmd(ud.basecmd + " submodule init", d) + runfetchcmd(ud.basecmd + " submodule update", d) + diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py new file mode 100644 index 0000000000..6927f6111e --- /dev/null +++ b/bitbake/lib/bb/fetch2/hg.py @@ -0,0 +1,187 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementation for mercurial 
DRCS (hg). + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2004 Marcin Juszkiewicz +# Copyright (C) 2007 Robert Schuster +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Hg(FetchMethod): + """Class to fetch from mercurial repositories""" + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with mercurial. + """ + return ud.type in ['hg'] + + def urldata_init(self, ud, d): + """ + init hg specific variable within url data + """ + if not "module" in ud.parm: + raise MissingParameterError('module', ud.url) + + ud.module = ud.parm["module"] + + # Create paths to mercurial checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath) + ud.moddir = os.path.join(ud.pkgdir, ud.module) + + ud.setup_revisons(d) + + if 'rev' in ud.parm: + ud.revision = ud.parm['rev'] + elif not ud.revision: + ud.revision = self.latest_revision(ud, d) + + ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) + + def need_update(self, ud, d): + revTag = ud.parm.get('rev', 'tip') + if revTag == "tip": + return True + if not os.path.exists(ud.localpath): + return True + return False + + def _buildhgcommand(self, ud, d, command): + """ + Build up an hg commandline based on ud + command is "fetch", "update", "info" + """ + + basecmd = data.expand('${FETCHCMD_hg}', d) + + proto = ud.parm.get('protocol', 'http') + + host = ud.host + if proto == "file": + host = "/" + ud.host = "localhost" + + if not ud.user: + hgroot = host + ud.path + else: + if ud.pswd: + hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path + else: + hgroot = ud.user + "@" + host + ud.path + + if command == "info": + return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module) + + options = []; + + # Don't specify revision for the fetch; clone the entire repo. + # This avoids an issue if the specified revision is a tag, because + # the tag actually exists in the specified revision + 1, so it won't + # be available when used in any successive commands. 
+        if ud.revision and command != "fetch":
+            options.append("-r %s" % ud.revision)
+
+        if command == "fetch":
+            cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
+        elif command == "pull":
+            # do not pass the options list; limiting pull to the rev causes the
+            # local repo not to contain it, and the immediately following
+            # "update" command will crash
+            if ud.user and ud.pswd:
+                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
+            else:
+                cmd = "%s pull" % (basecmd)
+        elif command == "update":
+            cmd = "%s update -C %s" % (basecmd, " ".join(options))
+        else:
+            raise FetchError("Invalid hg command %s" % command, ud.url)
+
+        return cmd
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
+            updatecmd = self._buildhgcommand(ud, d, "pull")
+            logger.info("Update " + ud.url)
+            # update sources there
+            os.chdir(ud.moddir)
+            logger.debug(1, "Running %s", updatecmd)
+            bb.fetch2.check_network_access(d, updatecmd, ud.url)
+            runfetchcmd(updatecmd, d)
+
+        else:
+            fetchcmd = self._buildhgcommand(ud, d, "fetch")
+            logger.info("Fetch " + ud.url)
+            # check out sources there
+            bb.utils.mkdirhier(ud.pkgdir)
+            os.chdir(ud.pkgdir)
+            logger.debug(1, "Running %s", fetchcmd)
+            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
+            runfetchcmd(fetchcmd, d)
+
+        # Even when we clone (fetch), we still need to update as hg's clone
+        # won't check out the specified revision if it's on a branch
+        updatecmd = self._buildhgcommand(ud, d, "update")
+        os.chdir(ud.moddir)
+        logger.debug(1, "Running %s", updatecmd)
+        runfetchcmd(updatecmd, d)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.hg' --exclude '.hgtags'"
+
+        os.chdir(ud.pkgdir)
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
+
+    def supports_srcrev(self):
+        return True
+
+    def _latest_revision(self, ud, d, name):
+        """
+        Compute tip revision for the url
+        """
+        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
+        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
+        return output.strip()
+
+    def _build_revision(self, ud, d, name):
+        return ud.revision
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 0000000000..5c4e42a942
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,116 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import urllib +import bb +import bb.utils +from bb import data +from bb.fetch2 import FetchMethod, FetchError +from bb.fetch2 import logger + +class Local(FetchMethod): + def supports(self, urldata, d): + """ + Check to see if a given url represents a local fetch. + """ + return urldata.type in ['file'] + + def urldata_init(self, ud, d): + # We don't set localfile as for this fetcher the file is already local! + ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0]) + ud.basename = os.path.basename(ud.decodedurl) + ud.basepath = ud.decodedurl + return + + def localpath(self, urldata, d): + """ + Return the local filename of a given url assuming a successful fetch. + """ + path = urldata.decodedurl + newpath = path + if path[0] != "/": + filespath = data.getVar('FILESPATH', d, True) + if filespath: + logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n ".join(filespath.split(":")))) + newpath = bb.utils.which(filespath, path) + if not newpath: + filesdir = data.getVar('FILESDIR', d, True) + if filesdir: + logger.debug(2, "Searching for %s in path: %s" % (path, filesdir)) + newpath = os.path.join(filesdir, path) + if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1: + # For expressions using '*', best we can do is take the first directory in FILESPATH that exists + newpath = bb.utils.which(filespath, ".") + logger.debug(2, "Searching for %s in path: %s" % (path, newpath)) + return newpath + if not os.path.exists(newpath): + dldirfile = os.path.join(d.getVar("DL_DIR", True), path) + logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) + bb.utils.mkdirhier(os.path.dirname(dldirfile)) + return dldirfile + return newpath + + def need_update(self, ud, d): + if ud.url.find("*") != -1: + return False + if os.path.exists(ud.localpath): + return False + return True + + def download(self, urldata, d): + """Fetch urls (no-op for Local method)""" + # no need to fetch local files, we'll deal with them in place. + if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): + locations = [] + filespath = data.getVar('FILESPATH', d, True) + if filespath: + locations = filespath.split(":") + filesdir = data.getVar('FILESDIR', d, True) + if filesdir: + locations.append(filesdir) + locations.append(d.getVar("DL_DIR", True)) + + msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) + raise FetchError(msg) + + return True + + def checkstatus(self, urldata, d): + """ + Check the status of the url + """ + if urldata.localpath.find("*") != -1: + logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url) + return True + if os.path.exists(urldata.localpath): + return True + return False + + def clean(self, urldata, d): + return + diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py new file mode 100644 index 0000000000..3d8779682f --- /dev/null +++ b/bitbake/lib/bb/fetch2/osc.py @@ -0,0 +1,135 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +Bitbake "Fetch" implementation for osc (Opensuse build service client). +Based on the svn "Fetch" implementation. 
+ +""" + +import os +import sys +import logging +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import runfetchcmd + +class Osc(FetchMethod): + """Class to fetch a module or modules from Opensuse build server + repositories.""" + + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with osc. + """ + return ud.type in ['osc'] + + def urldata_init(self, ud, d): + if not "module" in ud.parm: + raise MissingParameterError('module', ud.url) + + ud.module = ud.parm["module"] + + # Create paths to osc checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host) + ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) + + if 'rev' in ud.parm: + ud.revision = ud.parm['rev'] + else: + pv = data.getVar("PV", d, 0) + rev = bb.fetch2.srcrev_internal_helper(ud, d) + if rev and rev != True: + ud.revision = rev + else: + ud.revision = "" + + ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) + + def _buildosccommand(self, ud, d, command): + """ + Build up an ocs commandline based on ud + command is "fetch", "update", "info" + """ + + basecmd = data.expand('${FETCHCMD_osc}', d) + + proto = ud.parm.get('protocol', 'ocs') + + options = [] + + config = "-c %s" % self.generate_config(ud, d) + + if ud.revision: + options.append("-r %s" % ud.revision) + + coroot = self._strip_leading_slashes(ud.path) + + if command == "fetch": + osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) + elif command == "update": + osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) + else: + raise FetchError("Invalid osc command %s" % command, ud.url) + + return osccmd + + def download(self, ud, d): + """ + Fetch url + """ + + logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") + + if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): + oscupdatecmd = self._buildosccommand(ud, d, "update") + logger.info("Update "+ ud.url) + # update sources there + os.chdir(ud.moddir) + logger.debug(1, "Running %s", oscupdatecmd) + bb.fetch2.check_network_access(d, oscupdatecmd, ud.url) + runfetchcmd(oscupdatecmd, d) + else: + oscfetchcmd = self._buildosccommand(ud, d, "fetch") + logger.info("Fetch " + ud.url) + # check out sources there + bb.utils.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", oscfetchcmd) + bb.fetch2.check_network_access(d, oscfetchcmd, ud.url) + runfetchcmd(oscfetchcmd, d) + + os.chdir(os.path.join(ud.pkgdir + ud.path)) + # tar them up to a defined filename + runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath]) + + def supports_srcrev(self): + return False + + def generate_config(self, ud, d): + """ + Generate a .oscrc to be used for this run. 
+ """ + + config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc") + if (os.path.exists(config_path)): + os.remove(config_path) + + f = open(config_path, 'w') + f.write("[general]\n") + f.write("apisrv = %s\n" % ud.host) + f.write("scheme = http\n") + f.write("su-wrapper = su -c\n") + f.write("build-root = %s\n" % data.expand('${WORKDIR}', d)) + f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n") + f.write("extra-pkgs = gzip\n") + f.write("\n") + f.write("[%s]\n" % ud.host) + f.write("user = %s\n" % ud.parm["user"]) + f.write("pass = %s\n" % ud.parm["pswd"]) + f.close() + + return config_path diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py new file mode 100644 index 0000000000..9329d72779 --- /dev/null +++ b/bitbake/lib/bb/fetch2/perforce.py @@ -0,0 +1,194 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +from future_builtins import zip +import os +import subprocess +import logging +import bb +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import logger +from bb.fetch2 import runfetchcmd + +class Perforce(FetchMethod): + def supports(self, ud, d): + return ud.type in ['p4'] + + def doparse(url, d): + parm = {} + path = url.split("://")[1] + delim = path.find("@"); + if delim != -1: + (user, pswd, host, port) = path.split('@')[0].split(":") + path = path.split('@')[1] + else: + (host, port) = data.getVar('P4PORT', d).split(':') + user = "" + pswd = "" + + if path.find(";") != -1: + keys=[] + values=[] + plist = path.split(';') + for item in plist: + if item.count('='): + (key, value) = item.split('=') + keys.append(key) + values.append(value) + + parm = dict(zip(keys, values)) + path = "//" + path.split(';')[0] + host += ":%s" % (port) + parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) + + return host, path, user, pswd, parm + doparse = staticmethod(doparse) + + def getcset(d, depot, host, user, pswd, parm): + p4opt = "" + if "cset" in parm: + return parm["cset"]; + if user: + p4opt += " -u %s" % (user) + if pswd: + p4opt += " -P %s" % (pswd) + if host: + p4opt += " -p %s" % (host) + + p4date = data.getVar("P4DATE", d, True) + if "revision" in parm: + depot += "#%s" % (parm["revision"]) + elif "label" in parm: + depot += "@%s" % (parm["label"]) + elif p4date: + depot += "@%s" % (p4date) + + p4cmd = data.getVar('FETCHCMD_p4', d, True) + logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot) + p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot)) + cset = p4file.strip() + logger.debug(1, "READ %s", cset) + if not cset: + return -1 + + return cset.split(' ')[1] + getcset = staticmethod(getcset) + + def urldata_init(self, ud, d): + (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d) + + # If a label is specified, we use that as our filename + + if "label" in parm: + ud.localfile = "%s.tar.gz" % (parm["label"]) + return + + base = path + which = path.find('/...') + if which != -1: + base = path[:which-1] + + base = self._strip_leading_slashes(base) + + cset = Perforce.getcset(d, path, host, user, pswd, parm) + + ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) + + def download(self, ud, d): + """ + Fetch urls + """ + + (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d) + + if depot.find('/...') != -1: + path = depot[:depot.find('/...')] + else: + path = depot + + module = parm.get('module', os.path.basename(path)) + + # Get the p4 command + p4opt = "" + if user: + p4opt += " -u %s" % (user) + + if pswd: + p4opt += " -P %s" % (pswd) + + if host: + p4opt += " -p %s" % (host) + + p4cmd = data.getVar('FETCHCMD_p4', d, True) + + # create temp directory + logger.debug(2, "Fetch: creating temporary directory") + bb.utils.mkdirhier(d.expand('${WORKDIR}')) + mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'") + tmpfile, errors = bb.process.run(mktemp) + tmpfile = tmpfile.strip() + if not tmpfile: + raise FetchError("Fetch: unable to create temporary directory.. 
make sure 'mktemp' is in the PATH.", ud.url)
+
+        if "label" in parm:
+            depot = "%s@%s" % (depot, parm["label"])
+        else:
+            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
+            depot = "%s@%s" % (depot, cset)
+
+        os.chdir(tmpfile)
+        logger.info("Fetch " + ud.url)
+        logger.info("%s%s files %s", p4cmd, p4opt, depot)
+        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
+        p4file = [f.rstrip() for f in p4file.splitlines()]
+
+        if not p4file:
+            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
+
+        count = 0
+
+        for file in p4file:
+            list = file.split()
+
+            if list[2] == "delete":
+                continue
+
+            dest = list[0][len(path)+1:]
+            where = dest.find("#")
+
+            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
+            count = count + 1
+
+        if count == 0:
+            logger.error("Fetch: No files gathered from the P4 fetch")
+            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
+
+        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
+        # cleanup
+        bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 0000000000..21678eb7d9
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini
+#
+# Based on git.py which is:
+# Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb import data
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Repo(FetchMethod):
+    """Class to fetch a module or modules from repo (git) repositories"""
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with repo.
+        """
+        return ud.type in ["repo"]
+
+    def urldata_init(self, ud, d):
+        """
+        We don't care about the git rev of the manifests repository, but
+        we do care about the manifest to use. The default is "default".
+        We also care about the branch or tag to be used. The default is
+        "master".
+        """
+
+        ud.proto = ud.parm.get('protocol', 'git')
+        ud.branch = ud.parm.get('branch', 'master')
+        ud.manifest = ud.parm.get('manifest', 'default.xml')
+        if not ud.manifest.endswith('.xml'):
+            ud.manifest += '.xml'
+
+        ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
+
+    def download(self, ud, d):
+        """Fetch url"""
+
+        if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
+            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            return
+
+        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
+        repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
+        codir = os.path.join(repodir, gitsrcname, ud.manifest)
+
+        if ud.user:
+            username = ud.user + "@"
+        else:
+            username = ""
+
+        bb.utils.mkdirhier(os.path.join(codir, "repo"))
+        os.chdir(os.path.join(codir, "repo"))
+        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
+            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
+
+        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
+        runfetchcmd("repo sync", d)
+        os.chdir(codir)
+
+        scmdata = ud.parm.get("scmdata", "")
+        if scmdata == "keep":
+            tar_flags = ""
+        else:
+            tar_flags = "--exclude '.repo' --exclude '.git'"
+
+        # Create a cache
+        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*")), d)
+
+    def supports_srcrev(self):
+        return False
+
+    def _build_revision(self, ud, d):
+        return ud.manifest
+
+    def _want_sortable_revision(self, ud, d):
+        return False
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 0000000000..8ea4ef2ff3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,129 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake SFTP Fetch implementation
+
+Class for fetching files via SFTP. It tries to adhere to the (now
+expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
+Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
+(SSH)" (SECSH URI).
+
+It uses SFTP (so as to adhere to the SECSH URI specification). It only
+supports key based authentication, not password. This class, unlike
+the SSH fetcher, does not support fetching a directory tree from the
+remote.
+
+  http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
+  https://www.iana.org/assignments/uri-schemes/prov/sftp
+  https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
+
+Please note that '/' is used as the host path separator, and not ":"
+as you may be used to from the scp/sftp commands. You can use a
+~ (tilde) to specify a path relative to your home directory.
+(The /~user/ syntax, for specifying a path relative to another
+user's home directory, is not supported.) Note that the tilde must
+still follow the host path separator ("/"). See examples below.
+
+Example SRC_URIs:
+
+SRC_URI = "sftp://host.example.com/dir/path.file.txt"
+
+A path relative to your home directory:
+
+SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
+
+You can also specify a username (specifying a password in the
+URI is not supported; use SSH keys to authenticate):
+
+SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
+
+"""
+
+# Copyright (C) 2013, Olof Johansson
+#
+# Based in part on bb.fetch2.wget:
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib
+import commands
+from bb import data
+from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+
+
+class SFTP(FetchMethod):
+    """Class to fetch urls via 'sftp'"""
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url can be fetched with sftp.
+        """
+        return ud.type in ['sftp']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
+            raise bb.fetch2.ParameterError(
+                "Invalid protocol - if you wish to fetch from a " +
+                "git repository using ssh, you need to use the " +
+                "git:// prefix with protocol=ssh", ud.url)
+
+        if 'downloadfilename' in ud.parm:
+            ud.basename = ud.parm['downloadfilename']
+        else:
+            ud.basename = os.path.basename(ud.path)
+
+        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
+
+    def download(self, ud, d):
+        """Fetch urls"""
+
+        urlo = URI(ud.url)
+        basecmd = 'sftp -oPasswordAuthentication=no'
+        port = ''
+        if urlo.port:
+            port = '-P %d' % urlo.port
+            urlo.port = None
+
+        dldir = data.getVar('DL_DIR', d, True)
+        lpath = os.path.join(dldir, ud.localfile)
+
+        user = ''
+        if urlo.userinfo:
+            user = urlo.userinfo + '@'
+
+        path = urlo.path
+
+        # Support URIs relative to the user's home directory, with
+        # the tilde syntax. (E.g. ).
+        if path[:3] == '/~/':
+            path = path[3:]
+
+        remote = '%s%s:%s' % (user, urlo.hostname, path)
+
+        cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
+                               commands.mkarg(lpath))
+
+        bb.fetch2.check_network_access(d, cmd, ud.url)
+        runfetchcmd(cmd, d)
+        return True
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 0000000000..4ae979472c
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,127 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+'''
+BitBake 'Fetch' implementations
+
+This implementation is for Secure Shell (SSH), and attempts to comply with the
+IETF secsh internet draft:
+    http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
+
+    Currently does not support the sftp parameters, as this uses scp
+    Also does not support the 'fingerprint' connection parameter.
+
+    Please note that '/' is used as the host/path separator, not ':' as you
+    may be used to; also '~' can be used to specify the user HOME, but again
+    only after '/'
+
+    Example SRC_URI:
+    SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
+    SRC_URI = "ssh://user@host.example.com/~/file.txt"
+'''
+
+# Copyright (C) 2006 OpenedHand Ltd.
+#
+#
+# Based in part on svk.py:
+# Copyright (C) 2006 Holger Hans Peter Freyther
+# Based on svn.py:
+# Copyright (C) 2003, 2004 Chris Larson
+# Based on functions from the base bb module:
+# Copyright 2003 Holger Schurig
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import re, os +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import logger +from bb.fetch2 import runfetchcmd + + +__pattern__ = re.compile(r''' + \s* # Skip leading whitespace + ssh:// # scheme + ( # Optional username/password block + (?P\S+) # username + (:(?P\S+))? # colon followed by the password (optional) + )? + (?P(;[^;]+)*)? # connection parameters block (optional) + @ + (?P\S+?) # non-greedy match of the host + (:(?P[0-9]+))? # colon followed by the port (optional) + / + (?P[^;]+) # path on the remote system, may be absolute or relative, + # and may include the use of '~' to reference the remote home + # directory + (?P(;[^;]+)*)? # parameters block (optional) + $ +''', re.VERBOSE) + +class SSH(FetchMethod): + '''Class to fetch a module or modules via Secure Shell''' + + def supports(self, urldata, d): + return __pattern__.match(urldata.url) != None + + def supports_checksum(self, urldata): + return False + + def urldata_init(self, urldata, d): + if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git': + raise bb.fetch2.ParameterError( + "Invalid protocol - if you wish to fetch from a git " + + "repository using ssh, you need to use " + + "git:// prefix with protocol=ssh", urldata.url) + m = __pattern__.match(urldata.url) + path = m.group('path') + host = m.group('host') + urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path)) + + def download(self, urldata, d): + dldir = d.getVar('DL_DIR', True) + + m = __pattern__.match(urldata.url) + path = m.group('path') + host = m.group('host') + port = m.group('port') + user = m.group('user') + password = m.group('pass') + + if port: + portarg = '-P %s' % port + else: + portarg = '' + + if user: + fr = user + if password: + fr += ':%s' % password + fr += '@%s' % host + else: + fr = host + fr += ':%s' % path + + + import commands + cmd = 'scp -B -r %s %s %s/' % ( + portarg, + commands.mkarg(fr), + commands.mkarg(dldir) + ) + + bb.fetch2.check_network_access(d, cmd, urldata.url) + + runfetchcmd(cmd, d) + diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py new file mode 100644 index 0000000000..8847461913 --- /dev/null +++ b/bitbake/lib/bb/fetch2/svn.py @@ -0,0 +1,191 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementation for svn. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# Copyright (C) 2004 Marcin Juszkiewicz +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import sys +import logging +import bb +import re +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import MissingParameterError +from bb.fetch2 import runfetchcmd +from bb.fetch2 import logger + +class Svn(FetchMethod): + """Class to fetch a module or modules from svn repositories""" + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with svn. + """ + return ud.type in ['svn'] + + def urldata_init(self, ud, d): + """ + init svn specific variable within url data + """ + if not "module" in ud.parm: + raise MissingParameterError('module', ud.url) + + ud.basecmd = d.getVar('FETCHCMD_svn', True) + + ud.module = ud.parm["module"] + + # Create paths to svn checkouts + relpath = self._strip_leading_slashes(ud.path) + ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) + ud.moddir = os.path.join(ud.pkgdir, ud.module) + + ud.setup_revisons(d) + + if 'rev' in ud.parm: + ud.revision = ud.parm['rev'] + + ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) + + def _buildsvncommand(self, ud, d, command): + """ + Build up an svn commandline based on ud + command is "fetch", "update", "info" + """ + + proto = ud.parm.get('protocol', 'svn') + + svn_rsh = None + if proto == "svn+ssh" and "rsh" in ud.parm: + svn_rsh = ud.parm["rsh"] + + svnroot = ud.host + ud.path + + options = [] + + options.append("--no-auth-cache") + + if ud.user: + options.append("--username %s" % ud.user) + + if ud.pswd: + options.append("--password %s" % ud.pswd) + + if command == "info": + svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) + elif command == "log1": + svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module) + else: + suffix = "" + if ud.revision: + options.append("-r %s" % ud.revision) + suffix = "@%s" % (ud.revision) + + if command == "fetch": + svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module) + elif command == "update": + svncmd = "%s update %s" % (ud.basecmd, " ".join(options)) + else: + raise FetchError("Invalid svn command %s" % command, ud.url) + + if svn_rsh: + svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) + + return svncmd + + def download(self, ud, d): + """Fetch url""" + + logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") + + if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): + svnupdatecmd = self._buildsvncommand(ud, d, "update") + logger.info("Update " + ud.url) + # update sources there + os.chdir(ud.moddir) + # We need to attempt to run svn upgrade first in case its an older working format + try: + runfetchcmd(ud.basecmd + " upgrade", d) + except FetchError: + pass + logger.debug(1, "Running %s", svnupdatecmd) + bb.fetch2.check_network_access(d, svnupdatecmd, ud.url) + runfetchcmd(svnupdatecmd, d) + else: + svnfetchcmd = self._buildsvncommand(ud, d, "fetch") + logger.info("Fetch " + ud.url) + # check out sources there + bb.utils.mkdirhier(ud.pkgdir) + os.chdir(ud.pkgdir) + logger.debug(1, "Running %s", svnfetchcmd) + 
bb.fetch2.check_network_access(d, svnfetchcmd, ud.url) + runfetchcmd(svnfetchcmd, d) + + scmdata = ud.parm.get("scmdata", "") + if scmdata == "keep": + tar_flags = "" + else: + tar_flags = "--exclude '.svn'" + + os.chdir(ud.pkgdir) + # tar them up to a defined filename + runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath]) + + def clean(self, ud, d): + """ Clean SVN specific files and dirs """ + + bb.utils.remove(ud.localpath) + bb.utils.remove(ud.moddir, True) + + + def supports_srcrev(self): + return True + + def _revision_key(self, ud, d, name): + """ + Return a unique key for the url + """ + return "svn:" + ud.moddir + + def _latest_revision(self, ud, d, name): + """ + Return the latest upstream revision number + """ + bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1")) + + output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True) + + # skip the first line, as per output of svn log + # then we expect the revision on the 2nd line + revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1) + + return revision + + def sortable_revision(self, ud, d, name): + """ + Return a sortable revision number which in our case is the revision number + """ + + return False, self._build_revision(ud, d) + + def _build_revision(self, ud, d): + return ud.revision diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py new file mode 100644 index 0000000000..0456490368 --- /dev/null +++ b/bitbake/lib/bb/fetch2/wget.py @@ -0,0 +1,106 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +""" + +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import logging +import bb +import urllib +from bb import data +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import logger +from bb.fetch2 import runfetchcmd + +class Wget(FetchMethod): + """Class to fetch urls via 'wget'""" + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with wget. 
+ """ + return ud.type in ['http', 'https', 'ftp'] + + def recommends_checksum(self, urldata): + return True + + def urldata_init(self, ud, d): + if 'protocol' in ud.parm: + if ud.parm['protocol'] == 'git': + raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url) + + if 'downloadfilename' in ud.parm: + ud.basename = ud.parm['downloadfilename'] + else: + ud.basename = os.path.basename(ud.path) + + ud.localfile = data.expand(urllib.unquote(ud.basename), d) + + self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" + + def _runwget(self, ud, d, command, quiet): + + logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) + bb.fetch2.check_network_access(d, command) + runfetchcmd(command, d, quiet) + + def download(self, ud, d): + """Fetch urls""" + + fetchcmd = self.basecmd + + if 'downloadfilename' in ud.parm: + dldir = d.getVar("DL_DIR", True) + bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile)) + fetchcmd += " -O " + dldir + os.sep + ud.localfile + + uri = ud.url.split(";")[0] + if os.path.exists(ud.localpath): + # file exists, but we didnt complete it.. trying again.. + fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri) + else: + fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri) + + self._runwget(ud, d, fetchcmd, False) + + # Sanity check since wget can pretend it succeed when it didn't + # Also, this used to happen if sourceforge sent us to the mirror page + if not os.path.exists(ud.localpath): + raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri) + + if os.path.getsize(ud.localpath) == 0: + os.remove(ud.localpath) + raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) + + return True + + def checkstatus(self, ud, d): + + uri = ud.url.split(";")[0] + fetchcmd = self.basecmd + " --spider '%s'" % uri + + self._runwget(ud, d, fetchcmd, True) + + return True -- cgit v1.2.3-54-g00ecf