diff options
Diffstat (limited to 'bitbake/lib/bb/fetch2/wget.py')
-rw-r--r-- | bitbake/lib/bb/fetch2/wget.py | 48 |
1 file changed, 24 insertions, 24 deletions
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index 8bc9e93ca0..d688fd9d02 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -31,7 +31,7 @@ import subprocess | |||
31 | import os | 31 | import os |
32 | import logging | 32 | import logging |
33 | import bb | 33 | import bb |
34 | import urllib | 34 | import urllib.request, urllib.parse, urllib.error |
35 | from bb import data | 35 | from bb import data |
36 | from bb.fetch2 import FetchMethod | 36 | from bb.fetch2 import FetchMethod |
37 | from bb.fetch2 import FetchError | 37 | from bb.fetch2 import FetchError |
@@ -62,9 +62,9 @@ class Wget(FetchMethod): | |||
62 | else: | 62 | else: |
63 | ud.basename = os.path.basename(ud.path) | 63 | ud.basename = os.path.basename(ud.path) |
64 | 64 | ||
65 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 65 | ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) |
66 | if not ud.localfile: | 66 | if not ud.localfile: |
67 | ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d) | 67 | ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) |
68 | 68 | ||
69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" | 69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" |
70 | 70 | ||
@@ -105,11 +105,11 @@ class Wget(FetchMethod): | |||
105 | return True | 105 | return True |
106 | 106 | ||
107 | def checkstatus(self, fetch, ud, d): | 107 | def checkstatus(self, fetch, ud, d): |
108 | import urllib2, socket, httplib | 108 | import urllib.request, urllib.error, urllib.parse, socket, http.client |
109 | from urllib import addinfourl | 109 | from urllib.response import addinfourl |
110 | from bb.fetch2 import FetchConnectionCache | 110 | from bb.fetch2 import FetchConnectionCache |
111 | 111 | ||
112 | class HTTPConnectionCache(httplib.HTTPConnection): | 112 | class HTTPConnectionCache(http.client.HTTPConnection): |
113 | if fetch.connection_cache: | 113 | if fetch.connection_cache: |
114 | def connect(self): | 114 | def connect(self): |
115 | """Connect to the host and port specified in __init__.""" | 115 | """Connect to the host and port specified in __init__.""" |
@@ -125,7 +125,7 @@ class Wget(FetchMethod): | |||
125 | if self._tunnel_host: | 125 | if self._tunnel_host: |
126 | self._tunnel() | 126 | self._tunnel() |
127 | 127 | ||
128 | class CacheHTTPHandler(urllib2.HTTPHandler): | 128 | class CacheHTTPHandler(urllib.request.HTTPHandler): |
129 | def http_open(self, req): | 129 | def http_open(self, req): |
130 | return self.do_open(HTTPConnectionCache, req) | 130 | return self.do_open(HTTPConnectionCache, req) |
131 | 131 | ||
@@ -139,7 +139,7 @@ class Wget(FetchMethod): | |||
139 | - geturl(): return the original request URL | 139 | - geturl(): return the original request URL |
140 | - code: HTTP status code | 140 | - code: HTTP status code |
141 | """ | 141 | """ |
142 | host = req.get_host() | 142 | host = req.host |
143 | if not host: | 143 | if not host: |
144 | raise urlllib2.URLError('no host given') | 144 | raise urllib.error.URLError('no host given') |
145 | 145 | ||
@@ -147,7 +147,7 @@ class Wget(FetchMethod): | |||
147 | h.set_debuglevel(self._debuglevel) | 147 | h.set_debuglevel(self._debuglevel) |
148 | 148 | ||
149 | headers = dict(req.unredirected_hdrs) | 149 | headers = dict(req.unredirected_hdrs) |
150 | headers.update(dict((k, v) for k, v in req.headers.items() | 150 | headers.update(dict((k, v) for k, v in list(req.headers.items()) |
151 | if k not in headers)) | 151 | if k not in headers)) |
152 | 152 | ||
153 | # We want to make an HTTP/1.1 request, but the addinfourl | 153 | # We want to make an HTTP/1.1 request, but the addinfourl |
@@ -164,7 +164,7 @@ class Wget(FetchMethod): | |||
164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 | 164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 |
165 | 165 | ||
166 | headers = dict( | 166 | headers = dict( |
167 | (name.title(), val) for name, val in headers.items()) | 167 | (name.title(), val) for name, val in list(headers.items())) |
168 | 168 | ||
169 | if req._tunnel_host: | 169 | if req._tunnel_host: |
170 | tunnel_headers = {} | 170 | tunnel_headers = {} |
@@ -177,12 +177,12 @@ class Wget(FetchMethod): | |||
177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) | 177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) |
178 | 178 | ||
179 | try: | 179 | try: |
180 | h.request(req.get_method(), req.get_selector(), req.data, headers) | 180 | h.request(req.get_method(), req.selector, req.data, headers) |
181 | except socket.error, err: # XXX what error? | 181 | except socket.error as err: # XXX what error? |
182 | # Don't close connection when cache is enabled. | 182 | # Don't close connection when cache is enabled. |
183 | if fetch.connection_cache is None: | 183 | if fetch.connection_cache is None: |
184 | h.close() | 184 | h.close() |
185 | raise urllib2.URLError(err) | 185 | raise urllib.error.URLError(err) |
186 | else: | 186 | else: |
187 | try: | 187 | try: |
188 | r = h.getresponse(buffering=True) | 188 | r = h.getresponse(buffering=True) |
@@ -222,7 +222,7 @@ class Wget(FetchMethod): | |||
222 | 222 | ||
223 | return resp | 223 | return resp |
224 | 224 | ||
225 | class HTTPMethodFallback(urllib2.BaseHandler): | 225 | class HTTPMethodFallback(urllib.request.BaseHandler): |
226 | """ | 226 | """ |
227 | Fallback to GET if HEAD is not allowed (405 HTTP error) | 227 | Fallback to GET if HEAD is not allowed (405 HTTP error) |
228 | """ | 228 | """ |
@@ -230,11 +230,11 @@ class Wget(FetchMethod): | |||
230 | fp.read() | 230 | fp.read() |
231 | fp.close() | 231 | fp.close() |
232 | 232 | ||
233 | newheaders = dict((k,v) for k,v in req.headers.items() | 233 | newheaders = dict((k,v) for k,v in list(req.headers.items()) |
234 | if k.lower() not in ("content-length", "content-type")) | 234 | if k.lower() not in ("content-length", "content-type")) |
235 | return self.parent.open(urllib2.Request(req.get_full_url(), | 235 | return self.parent.open(urllib.request.Request(req.get_full_url(), |
236 | headers=newheaders, | 236 | headers=newheaders, |
237 | origin_req_host=req.get_origin_req_host(), | 237 | origin_req_host=req.origin_req_host, |
238 | unverifiable=True)) | 238 | unverifiable=True)) |
239 | 239 | ||
240 | """ | 240 | """ |
@@ -249,35 +249,35 @@ class Wget(FetchMethod): | |||
249 | """ | 249 | """ |
250 | http_error_406 = http_error_405 | 250 | http_error_406 = http_error_405 |
251 | 251 | ||
252 | class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler): | 252 | class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): |
253 | """ | 253 | """ |
254 | urllib2.HTTPRedirectHandler resets the method to GET on redirect, | 254 | urllib.request.HTTPRedirectHandler resets the method to GET on redirect, |
255 | when we want to follow redirects using the original method. | 255 | when we want to follow redirects using the original method. |
256 | """ | 256 | """ |
257 | def redirect_request(self, req, fp, code, msg, headers, newurl): | 257 | def redirect_request(self, req, fp, code, msg, headers, newurl): |
258 | newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) | 258 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) |
259 | newreq.get_method = lambda: req.get_method() | 259 | newreq.get_method = lambda: req.get_method() |
260 | return newreq | 260 | return newreq |
261 | exported_proxies = export_proxies(d) | 261 | exported_proxies = export_proxies(d) |
262 | 262 | ||
263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] | 263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] |
264 | if export_proxies: | 264 | if export_proxies: |
265 | handlers.append(urllib2.ProxyHandler()) | 265 | handlers.append(urllib.request.ProxyHandler()) |
266 | handlers.append(CacheHTTPHandler()) | 266 | handlers.append(CacheHTTPHandler()) |
267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default | 267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default |
268 | # see PEP-0476, this causes verification errors on some https servers | 268 | # see PEP-0476, this causes verification errors on some https servers |
269 | # so disable by default. | 269 | # so disable by default. |
270 | import ssl | 270 | import ssl |
271 | if hasattr(ssl, '_create_unverified_context'): | 271 | if hasattr(ssl, '_create_unverified_context'): |
272 | handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context())) | 272 | handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) |
273 | opener = urllib2.build_opener(*handlers) | 273 | opener = urllib.request.build_opener(*handlers) |
274 | 274 | ||
275 | try: | 275 | try: |
276 | uri = ud.url.split(";")[0] | 276 | uri = ud.url.split(";")[0] |
277 | r = urllib2.Request(uri) | 277 | r = urllib.request.Request(uri) |
278 | r.get_method = lambda: "HEAD" | 278 | r.get_method = lambda: "HEAD" |
279 | opener.open(r) | 279 | opener.open(r) |
280 | except urllib2.URLError as e: | 280 | except urllib.error.URLError as e: |
281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches |
282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) | 282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) |
283 | return False | 283 | return False |