author     Aníbal Limón <anibal.limon@linux.intel.com>  2015-07-08 18:34:17 -0500
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2015-07-12 22:50:42 +0100
commit     3b186fba59163c5a4eacdaf1be7899b60a1482ee (patch)
tree       084aec7544f31c7f7daf82604552d1cd30c2e0ef /bitbake/lib/bb/fetch2/wget.py
parent     97c5ecffaab5c276781e71d00abad8d4ed1fa91c (diff)
download   poky-3b186fba59163c5a4eacdaf1be7899b60a1482ee.tar.gz
bitbake: fetch2/wget.py: Add support for a connection cache in checkstatus.
fetch2/__init__.py: Add a connection_cache parameter to Fetch.__init__ so
that a connection cache object can be passed through to the checkstatus
method.
[YOCTO #7796]
(Bitbake rev: 9fa6407e6cefe66c77467419a8040d6957a6bb01)
Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
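
For context, a minimal usage sketch (not part of this commit), assuming the
FetchConnectionCache class and its close_connections() helper added by the
companion fetch2/__init__.py change, and a populated datastore d; the URI is
illustrative:

    from bb.fetch2 import Fetch, FetchConnectionCache

    # Share one connection cache so repeated checks against the same
    # host reuse a single socket instead of reconnecting per URI.
    connection_cache = FetchConnectionCache()
    fetcher = Fetch(["http://downloads.example.com/src.tar.gz"], d,
                    cache=False, connection_cache=connection_cache)
    try:
        fetcher.checkstatus()    # raises on an unreachable upstream
    finally:
        connection_cache.close_connections()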
Diffstat (limited to 'bitbake/lib/bb/fetch2/wget.py')
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py  140
1 file changed, 136 insertions(+), 4 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index abacbcf796..8cb5f2be81 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -101,12 +101,144 @@ class Wget(FetchMethod):
         return True
 
     def checkstatus(self, fetch, ud, d):
+        import urllib2, socket, httplib
+        from urllib import addinfourl
+        from bb.fetch2 import FetchConnectionCache
+
+        class HTTPConnectionCache(httplib.HTTPConnection):
+            if fetch.connection_cache:
+                def connect(self):
+                    """Connect to the host and port specified in __init__."""
+
+                    sock = fetch.connection_cache.get_connection(self.host, self.port)
+                    if sock:
+                        self.sock = sock
+                    else:
+                        self.sock = socket.create_connection((self.host, self.port),
+                                    self.timeout, self.source_address)
+                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
+
+                    if self._tunnel_host:
+                        self._tunnel()
+
+        class CacheHTTPHandler(urllib2.HTTPHandler):
+            def http_open(self, req):
+                return self.do_open(HTTPConnectionCache, req)
+
+            def do_open(self, http_class, req):
+                """Return an addinfourl object for the request, using http_class.
+
+                http_class must implement the HTTPConnection API from httplib.
+                The addinfourl return value is a file-like object.  It also
+                has methods and attributes including:
+                    - info(): return a mimetools.Message object for the headers
+                    - geturl(): return the original request URL
+                    - code: HTTP status code
+                """
+                host = req.get_host()
+                if not host:
+                    raise urllib2.URLError('no host given')
+
+                h = http_class(host, timeout=req.timeout) # will parse host:port
+                h.set_debuglevel(self._debuglevel)
+
+                headers = dict(req.unredirected_hdrs)
+                headers.update(dict((k, v) for k, v in req.headers.items()
+                            if k not in headers))
+
+                # We want to make an HTTP/1.1 request, but the addinfourl
+                # class isn't prepared to deal with a persistent connection.
+                # It will try to read all remaining data from the socket,
+                # which will block while the server waits for the next request.
+                # So make sure the connection gets closed after the (only)
+                # request.
+
+                # Don't close the connection when connection_cache is enabled.
+                if fetch.connection_cache is None:
+                    headers["Connection"] = "close"
+                else:
+                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
+
+                headers = dict(
+                    (name.title(), val) for name, val in headers.items())
+
+                if req._tunnel_host:
+                    tunnel_headers = {}
+                    proxy_auth_hdr = "Proxy-Authorization"
+                    if proxy_auth_hdr in headers:
+                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
+                        # Proxy-Authorization should not be sent to origin
+                        # server.
+                        del headers[proxy_auth_hdr]
+                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
+
+                try:
+                    h.request(req.get_method(), req.get_selector(), req.data, headers)
+                except socket.error, err: # XXX what error?
+                    # Don't close the connection when the cache is enabled.
+                    if fetch.connection_cache is None:
+                        h.close()
+                    raise urllib2.URLError(err)
+                else:
+                    try:
+                        r = h.getresponse(buffering=True)
+                    except TypeError: # buffering kw not supported
+                        r = h.getresponse()
+
+                # Pick apart the HTTPResponse object to get the addinfourl
+                # object initialized properly.
+
+                # Wrap the HTTPResponse object in socket's file object adapter
+                # for Windows.  That adapter calls recv(), so delegate recv()
+                # to read().  This weird wrapping allows the returned object to
+                # have readline() and readlines() methods.
+
+                # XXX It might be better to extract the read buffering code
+                # out of socket._fileobject() and into a base class.
+                r.recv = r.read
+
+                # no data, just have to read
+                r.read()
+                class fp_dummy(object):
+                    def read(self):
+                        return ""
+                    def readline(self):
+                        return ""
+                    def close(self):
+                        pass
+
+                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+                resp.code = r.status
+                resp.msg = r.reason
+
+                # Close the connection when the server requests it.
+                if fetch.connection_cache is not None:
+                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
+                        fetch.connection_cache.remove_connection(h.host, h.port)
+
+                return resp
+
+        def export_proxies(d):
+            variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
+                    'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
+
+            for v in variables:
+                if v not in os.environ:
+                    os.environ[v] = d.getVar(v, True) or ''
+
+        def head_method(self):
+            return "HEAD"
+
+        export_proxies(d)
+        urllib2.Request.get_method = head_method
+        opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler)
+        urllib2.install_opener(opener)
 
         uri = ud.url.split(";")[0]
-        fetchcmd = self.basecmd + " --spider '%s'" % uri
-
-        self._runwget(ud, d, fetchcmd, True)
-
+        try:
+            f = urllib2.urlopen(uri)
+        except:
+            return False
         return True
 
     def _parse_path(self, regex, s):
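
With the opener installed, checkstatus issues a HEAD request through urllib2
instead of spawning "wget --spider" as before. A standalone sketch of the same
idea, using only stdlib urllib2 (Python 2; the URL is illustrative, not
bitbake's actual code path):

    import urllib2

    req = urllib2.Request("http://example.com/file.tar.gz")
    req.get_method = lambda: "HEAD"   # same trick as head_method() above
    try:
        resp = urllib2.urlopen(req)
        print resp.code               # 200 when the file exists upstream
    except urllib2.URLError:
        print "unreachable or missing"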