From 3b186fba59163c5a4eacdaf1be7899b60a1482ee Mon Sep 17 00:00:00 2001
From: Aníbal Limón
Date: Wed, 8 Jul 2015 18:34:17 -0500
Subject: bitbake: fetch2/wget.py: Add support of connection cache in checkstatus.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

fetch2/__init__.py: Add a connection_cache param to Fetch __init__ in
order to pass the connection cache object to the checkstatus method.

[YOCTO #7796]

(Bitbake rev: 9fa6407e6cefe66c77467419a8040d6957a6bb01)

Signed-off-by: Aníbal Limón
Signed-off-by: Richard Purdie
---
 bitbake/lib/bb/fetch2/wget.py | 140 ++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 136 insertions(+), 4 deletions(-)

(limited to 'bitbake/lib/bb/fetch2/wget.py')

diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index abacbcf796..8cb5f2be81 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -101,12 +101,144 @@ class Wget(FetchMethod):
 
         return True
 
     def checkstatus(self, fetch, ud, d):
+        import urllib2, socket, httplib
+        from urllib import addinfourl
+        from bb.fetch2 import FetchConnectionCache
+
+        class HTTPConnectionCache(httplib.HTTPConnection):
+            if fetch.connection_cache:
+                def connect(self):
+                    """Connect to the host and port specified in __init__."""
+
+                    sock = fetch.connection_cache.get_connection(self.host, self.port)
+                    if sock:
+                        self.sock = sock
+                    else:
+                        self.sock = socket.create_connection((self.host, self.port),
+                                    self.timeout, self.source_address)
+                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
+
+                    if self._tunnel_host:
+                        self._tunnel()
+
+        class CacheHTTPHandler(urllib2.HTTPHandler):
+            def http_open(self, req):
+                return self.do_open(HTTPConnectionCache, req)
+
+            def do_open(self, http_class, req):
+                """Return an addinfourl object for the request, using http_class.
+
+                http_class must implement the HTTPConnection API from httplib.
+                The addinfourl return value is a file-like object.  It also
+                has methods and attributes including:
+                    - info(): return a mimetools.Message object for the headers
+                    - geturl(): return the original request URL
+                    - code: HTTP status code
+                """
+                host = req.get_host()
+                if not host:
+                    raise urllib2.URLError('no host given')
+
+                h = http_class(host, timeout=req.timeout) # will parse host:port
+                h.set_debuglevel(self._debuglevel)
+
+                headers = dict(req.unredirected_hdrs)
+                headers.update(dict((k, v) for k, v in req.headers.items()
+                            if k not in headers))
+
+                # We want to make an HTTP/1.1 request, but the addinfourl
+                # class isn't prepared to deal with a persistent connection.
+                # It will try to read all remaining data from the socket,
+                # which will block while the server waits for the next request.
+                # So make sure the connection gets closed after the (only)
+                # request.
+
+                # Don't close connection when connection_cache is enabled,
+                if fetch.connection_cache is None:
+                    headers["Connection"] = "close"
+                else:
+                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
+
+                headers = dict(
+                    (name.title(), val) for name, val in headers.items())
+
+                if req._tunnel_host:
+                    tunnel_headers = {}
+                    proxy_auth_hdr = "Proxy-Authorization"
+                    if proxy_auth_hdr in headers:
+                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
+                        # Proxy-Authorization should not be sent to origin
+                        # server.
+                        del headers[proxy_auth_hdr]
+                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
+
+                try:
+                    h.request(req.get_method(), req.get_selector(), req.data, headers)
+                except socket.error, err: # XXX what error?
+                    # Don't close connection when cache is enabled.
+                    if fetch.connection_cache is None:
+                        h.close()
+                    raise urllib2.URLError(err)
+                else:
+                    try:
+                        r = h.getresponse(buffering=True)
+                    except TypeError: # buffering kw not supported
+                        r = h.getresponse()
+
+                # Pick apart the HTTPResponse object to get the addinfourl
+                # object initialized properly.
+
+                # Wrap the HTTPResponse object in socket's file object adapter
+                # for Windows.  That adapter calls recv(), so delegate recv()
+                # to read().  This weird wrapping allows the returned object to
+                # have readline() and readlines() methods.
+
+                # XXX It might be better to extract the read buffering code
+                # out of socket._fileobject() and into a base class.
+                r.recv = r.read
+
+                # no data, just have to read
+                r.read()
+                class fp_dummy(object):
+                    def read(self):
+                        return ""
+                    def readline(self):
+                        return ""
+                    def close(self):
+                        pass
+
+                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+                resp.code = r.status
+                resp.msg = r.reason
+
+                # Close connection when server request it.
+                if fetch.connection_cache is not None:
+                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
+                        fetch.connection_cache.remove_connection(h.host, h.port)
+
+                return resp
+
+        def export_proxies(d):
+            variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
+                            'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
+
+            for v in variables:
+                if not v in os.environ.keys():
+                    os.environ[v] = d.getVar(v, True) or ''
+
+        def head_method(self):
+            return "HEAD"
+
+        export_proxies(d)
+        urllib2.Request.get_method = head_method
+        opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler)
+        urllib2.install_opener(opener)
         uri = ud.url.split(";")[0]
-        fetchcmd = self.basecmd + " --spider '%s'" % uri
-
-        self._runwget(ud, d, fetchcmd, True)
-
+        try:
+            f = urllib2.urlopen(uri)
+        except:
+            return False
         return True
 
     def _parse_path(self, regex, s):
-- 
cgit v1.2.3-54-g00ecf
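
Usage sketch (assumptions marked): the companion fetch2/__init__.py change
described in the commit message adds a connection_cache parameter to
Fetch __init__ so that checkstatus() can reuse sockets across URIs. A caller
could wire it up roughly as below; the no-argument FetchConnectionCache()
constructor, Fetch.checkstatus() taking no arguments, and the
close_connections() teardown are assumptions inferred from this series, not
shown in this diff, and check_uris() is a hypothetical helper.

    import bb.fetch2

    def check_uris(uris, d):
        # Hypothetical helper: one persistent connection per host is reused
        # for every HEAD check instead of reconnecting for each URI.
        connection_cache = bb.fetch2.FetchConnectionCache()  # assumed no-arg constructor
        try:
            fetcher = bb.fetch2.Fetch(uris, d, connection_cache=connection_cache)
            fetcher.checkstatus()  # assumed to raise on a failing URI
        finally:
            connection_cache.close_connections()  # assumed teardown helper

Passing the cache in from the caller, rather than creating it inside Fetch,
is what allows one set of sockets to be shared across many Fetch instances.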