diff options
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r-- | bitbake/lib/bb/fetch2/__init__.py | 36 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/local.py | 4 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/npm.py | 6 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/perforce.py | 2 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/sftp.py | 8 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/ssh.py | 6 | ||||
-rw-r--r-- | bitbake/lib/bb/fetch2/wget.py | 48 |
7 files changed, 51 insertions, 59 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py index 14fe3c753a..be01bdbb34 100644 --- a/bitbake/lib/bb/fetch2/__init__.py +++ b/bitbake/lib/bb/fetch2/__init__.py | |||
@@ -28,27 +28,23 @@ BitBake build tools. | |||
28 | import os, re | 28 | import os, re |
29 | import signal | 29 | import signal |
30 | import logging | 30 | import logging |
31 | import urllib | 31 | import urllib.request, urllib.parse, urllib.error |
32 | import urlparse | 32 | if 'git' not in urllib.parse.uses_netloc: |
33 | urllib.parse.uses_netloc.append('git') | ||
34 | import operator | ||
33 | import collections | 35 | import collections |
36 | import subprocess | ||
37 | import pickle | ||
34 | import bb.persist_data, bb.utils | 38 | import bb.persist_data, bb.utils |
35 | import bb.checksum | 39 | import bb.checksum |
36 | from bb import data | 40 | from bb import data |
37 | import bb.process | 41 | import bb.process |
38 | import subprocess | ||
39 | 42 | ||
40 | __version__ = "2" | 43 | __version__ = "2" |
41 | _checksum_cache = bb.checksum.FileChecksumCache() | 44 | _checksum_cache = bb.checksum.FileChecksumCache() |
42 | 45 | ||
43 | logger = logging.getLogger("BitBake.Fetcher") | 46 | logger = logging.getLogger("BitBake.Fetcher") |
44 | 47 | ||
45 | try: | ||
46 | import cPickle as pickle | ||
47 | except ImportError: | ||
48 | import pickle | ||
49 | logger.info("Importing cPickle failed. " | ||
50 | "Falling back to a very slow implementation.") | ||
51 | |||
52 | class BBFetchException(Exception): | 48 | class BBFetchException(Exception): |
53 | """Class all fetch exceptions inherit from""" | 49 | """Class all fetch exceptions inherit from""" |
54 | def __init__(self, message): | 50 | def __init__(self, message): |
@@ -230,14 +226,14 @@ class URI(object): | |||
230 | # them are not quite RFC compliant. | 226 | # them are not quite RFC compliant. |
231 | uri, param_str = (uri.split(";", 1) + [None])[:2] | 227 | uri, param_str = (uri.split(";", 1) + [None])[:2] |
232 | 228 | ||
233 | urlp = urlparse.urlparse(uri) | 229 | urlp = urllib.parse.urlparse(uri) |
234 | self.scheme = urlp.scheme | 230 | self.scheme = urlp.scheme |
235 | 231 | ||
236 | reparse = 0 | 232 | reparse = 0 |
237 | 233 | ||
238 | # Coerce urlparse to make URI scheme use netloc | 234 | # Coerce urlparse to make URI scheme use netloc |
239 | if not self.scheme in urlparse.uses_netloc: | 235 | if not self.scheme in urllib.parse.uses_netloc: |
240 | urlparse.uses_params.append(self.scheme) | 236 | urllib.parse.uses_params.append(self.scheme) |
241 | reparse = 1 | 237 | reparse = 1 |
242 | 238 | ||
243 | # Make urlparse happy(/ier) by converting local resources | 239 | # Make urlparse happy(/ier) by converting local resources |
@@ -248,7 +244,7 @@ class URI(object): | |||
248 | reparse = 1 | 244 | reparse = 1 |
249 | 245 | ||
250 | if reparse: | 246 | if reparse: |
251 | urlp = urlparse.urlparse(uri) | 247 | urlp = urllib.parse.urlparse(uri) |
252 | 248 | ||
253 | # Identify if the URI is relative or not | 249 | # Identify if the URI is relative or not |
254 | if urlp.scheme in self._relative_schemes and \ | 250 | if urlp.scheme in self._relative_schemes and \ |
@@ -264,7 +260,7 @@ class URI(object): | |||
264 | if urlp.password: | 260 | if urlp.password: |
265 | self.userinfo += ':%s' % urlp.password | 261 | self.userinfo += ':%s' % urlp.password |
266 | 262 | ||
267 | self.path = urllib.unquote(urlp.path) | 263 | self.path = urllib.parse.unquote(urlp.path) |
268 | 264 | ||
269 | if param_str: | 265 | if param_str: |
270 | self.params = self._param_str_split(param_str, ";") | 266 | self.params = self._param_str_split(param_str, ";") |
@@ -312,11 +308,11 @@ class URI(object): | |||
312 | 308 | ||
313 | @property | 309 | @property |
314 | def path_quoted(self): | 310 | def path_quoted(self): |
315 | return urllib.quote(self.path) | 311 | return urllib.parse.quote(self.path) |
316 | 312 | ||
317 | @path_quoted.setter | 313 | @path_quoted.setter |
318 | def path_quoted(self, path): | 314 | def path_quoted(self, path): |
319 | self.path = urllib.unquote(path) | 315 | self.path = urllib.parse.unquote(path) |
320 | 316 | ||
321 | @property | 317 | @property |
322 | def path(self): | 318 | def path(self): |
@@ -398,7 +394,7 @@ def decodeurl(url): | |||
398 | s1, s2 = s.split('=') | 394 | s1, s2 = s.split('=') |
399 | p[s1] = s2 | 395 | p[s1] = s2 |
400 | 396 | ||
401 | return type, host, urllib.unquote(path), user, pswd, p | 397 | return type, host, urllib.parse.unquote(path), user, pswd, p |
402 | 398 | ||
403 | def encodeurl(decoded): | 399 | def encodeurl(decoded): |
404 | """Encodes a URL from tokens (scheme, network location, path, | 400 | """Encodes a URL from tokens (scheme, network location, path, |
@@ -422,7 +418,7 @@ def encodeurl(decoded): | |||
422 | # Standardise path to ensure comparisons work | 418 | # Standardise path to ensure comparisons work |
423 | while '//' in path: | 419 | while '//' in path: |
424 | path = path.replace("//", "/") | 420 | path = path.replace("//", "/") |
425 | url += "%s" % urllib.quote(path) | 421 | url += "%s" % urllib.parse.quote(path) |
426 | if p: | 422 | if p: |
427 | for parm in p: | 423 | for parm in p: |
428 | url += ";%s=%s" % (parm, p[parm]) | 424 | url += ";%s=%s" % (parm, p[parm]) |
@@ -1735,7 +1731,7 @@ class FetchConnectionCache(object): | |||
1735 | del self.cache[cn] | 1731 | del self.cache[cn] |
1736 | 1732 | ||
1737 | def close_connections(self): | 1733 | def close_connections(self): |
1738 | for cn in self.cache.keys(): | 1734 | for cn in list(self.cache.keys()): |
1739 | self.cache[cn].close() | 1735 | self.cache[cn].close() |
1740 | del self.cache[cn] | 1736 | del self.cache[cn] |
1741 | 1737 | ||
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py index 303a52b638..51ca78d12b 100644 --- a/bitbake/lib/bb/fetch2/local.py +++ b/bitbake/lib/bb/fetch2/local.py | |||
@@ -26,7 +26,7 @@ BitBake build tools. | |||
26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig | 26 | # Based on functions from the base bb module, Copyright 2003 Holger Schurig |
27 | 27 | ||
28 | import os | 28 | import os |
29 | import urllib | 29 | import urllib.request, urllib.parse, urllib.error |
30 | import bb | 30 | import bb |
31 | import bb.utils | 31 | import bb.utils |
32 | from bb import data | 32 | from bb import data |
@@ -42,7 +42,7 @@ class Local(FetchMethod): | |||
42 | 42 | ||
43 | def urldata_init(self, ud, d): | 43 | def urldata_init(self, ud, d): |
44 | # We don't set localfile as for this fetcher the file is already local! | 44 | # We don't set localfile as for this fetcher the file is already local! |
45 | ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0]) | 45 | ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0]) |
46 | ud.basename = os.path.basename(ud.decodedurl) | 46 | ud.basename = os.path.basename(ud.decodedurl) |
47 | ud.basepath = ud.decodedurl | 47 | ud.basepath = ud.decodedurl |
48 | ud.needdonestamp = False | 48 | ud.needdonestamp = False |
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py index d9e46b2e8c..2fd43034ba 100644 --- a/bitbake/lib/bb/fetch2/npm.py +++ b/bitbake/lib/bb/fetch2/npm.py | |||
@@ -20,7 +20,7 @@ Usage in the recipe: | |||
20 | 20 | ||
21 | import os | 21 | import os |
22 | import sys | 22 | import sys |
23 | import urllib | 23 | import urllib.request, urllib.parse, urllib.error |
24 | import json | 24 | import json |
25 | import subprocess | 25 | import subprocess |
26 | import signal | 26 | import signal |
@@ -196,9 +196,9 @@ class Npm(FetchMethod): | |||
196 | optdepsfound[dep] = dependencies[dep] | 196 | optdepsfound[dep] = dependencies[dep] |
197 | else: | 197 | else: |
198 | depsfound[dep] = dependencies[dep] | 198 | depsfound[dep] = dependencies[dep] |
199 | for dep, version in optdepsfound.iteritems(): | 199 | for dep, version in optdepsfound.items(): |
200 | self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True) | 200 | self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True) |
201 | for dep, version in depsfound.iteritems(): | 201 | for dep, version in depsfound.items(): |
202 | self._getdependencies(dep, data[pkg]['deps'], version, d, ud) | 202 | self._getdependencies(dep, data[pkg]['deps'], version, d, ud) |
203 | 203 | ||
204 | def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest): | 204 | def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest): |
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py index 1aef246942..ce3cda2670 100644 --- a/bitbake/lib/bb/fetch2/perforce.py +++ b/bitbake/lib/bb/fetch2/perforce.py | |||
@@ -61,7 +61,7 @@ class Perforce(FetchMethod): | |||
61 | keys.append(key) | 61 | keys.append(key) |
62 | values.append(value) | 62 | values.append(value) |
63 | 63 | ||
64 | parm = dict(zip(keys, values)) | 64 | parm = dict(list(zip(keys, values))) |
65 | path = "//" + path.split(';')[0] | 65 | path = "//" + path.split(';')[0] |
66 | host += ":%s" % (port) | 66 | host += ":%s" % (port) |
67 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) | 67 | parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm) |
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py index cb2f753a8e..7989fccc75 100644 --- a/bitbake/lib/bb/fetch2/sftp.py +++ b/bitbake/lib/bb/fetch2/sftp.py | |||
@@ -61,8 +61,7 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt" | |||
61 | 61 | ||
62 | import os | 62 | import os |
63 | import bb | 63 | import bb |
64 | import urllib | 64 | import urllib.request, urllib.parse, urllib.error |
65 | import commands | ||
66 | from bb import data | 65 | from bb import data |
67 | from bb.fetch2 import URI | 66 | from bb.fetch2 import URI |
68 | from bb.fetch2 import FetchMethod | 67 | from bb.fetch2 import FetchMethod |
@@ -93,7 +92,7 @@ class SFTP(FetchMethod): | |||
93 | else: | 92 | else: |
94 | ud.basename = os.path.basename(ud.path) | 93 | ud.basename = os.path.basename(ud.path) |
95 | 94 | ||
96 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 95 | ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) |
97 | 96 | ||
98 | def download(self, ud, d): | 97 | def download(self, ud, d): |
99 | """Fetch urls""" | 98 | """Fetch urls""" |
@@ -121,8 +120,7 @@ class SFTP(FetchMethod): | |||
121 | 120 | ||
122 | remote = '%s%s:%s' % (user, urlo.hostname, path) | 121 | remote = '%s%s:%s' % (user, urlo.hostname, path) |
123 | 122 | ||
124 | cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), | 123 | cmd = '%s %s %s %s' % (basecmd, port, remote, lpath) |
125 | commands.mkarg(lpath)) | ||
126 | 124 | ||
127 | bb.fetch2.check_network_access(d, cmd, ud.url) | 125 | bb.fetch2.check_network_access(d, cmd, ud.url) |
128 | runfetchcmd(cmd, d) | 126 | runfetchcmd(cmd, d) |
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py index 635578a711..56f9b7eb35 100644 --- a/bitbake/lib/bb/fetch2/ssh.py +++ b/bitbake/lib/bb/fetch2/ssh.py | |||
@@ -114,12 +114,10 @@ class SSH(FetchMethod): | |||
114 | fr = host | 114 | fr = host |
115 | fr += ':%s' % path | 115 | fr += ':%s' % path |
116 | 116 | ||
117 | |||
118 | import commands | ||
119 | cmd = 'scp -B -r %s %s %s/' % ( | 117 | cmd = 'scp -B -r %s %s %s/' % ( |
120 | portarg, | 118 | portarg, |
121 | commands.mkarg(fr), | 119 | fr, |
122 | commands.mkarg(dldir) | 120 | dldir |
123 | ) | 121 | ) |
124 | 122 | ||
125 | bb.fetch2.check_network_access(d, cmd, urldata.url) | 123 | bb.fetch2.check_network_access(d, cmd, urldata.url) |
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index 8bc9e93ca0..d688fd9d02 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -31,7 +31,7 @@ import subprocess | |||
31 | import os | 31 | import os |
32 | import logging | 32 | import logging |
33 | import bb | 33 | import bb |
34 | import urllib | 34 | import urllib.request, urllib.parse, urllib.error |
35 | from bb import data | 35 | from bb import data |
36 | from bb.fetch2 import FetchMethod | 36 | from bb.fetch2 import FetchMethod |
37 | from bb.fetch2 import FetchError | 37 | from bb.fetch2 import FetchError |
@@ -62,9 +62,9 @@ class Wget(FetchMethod): | |||
62 | else: | 62 | else: |
63 | ud.basename = os.path.basename(ud.path) | 63 | ud.basename = os.path.basename(ud.path) |
64 | 64 | ||
65 | ud.localfile = data.expand(urllib.unquote(ud.basename), d) | 65 | ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) |
66 | if not ud.localfile: | 66 | if not ud.localfile: |
67 | ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d) | 67 | ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) |
68 | 68 | ||
69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" | 69 | self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" |
70 | 70 | ||
@@ -105,11 +105,11 @@ class Wget(FetchMethod): | |||
105 | return True | 105 | return True |
106 | 106 | ||
107 | def checkstatus(self, fetch, ud, d): | 107 | def checkstatus(self, fetch, ud, d): |
108 | import urllib2, socket, httplib | 108 | import urllib.request, urllib.error, urllib.parse, socket, http.client |
109 | from urllib import addinfourl | 109 | from urllib.response import addinfourl |
110 | from bb.fetch2 import FetchConnectionCache | 110 | from bb.fetch2 import FetchConnectionCache |
111 | 111 | ||
112 | class HTTPConnectionCache(httplib.HTTPConnection): | 112 | class HTTPConnectionCache(http.client.HTTPConnection): |
113 | if fetch.connection_cache: | 113 | if fetch.connection_cache: |
114 | def connect(self): | 114 | def connect(self): |
115 | """Connect to the host and port specified in __init__.""" | 115 | """Connect to the host and port specified in __init__.""" |
@@ -125,7 +125,7 @@ class Wget(FetchMethod): | |||
125 | if self._tunnel_host: | 125 | if self._tunnel_host: |
126 | self._tunnel() | 126 | self._tunnel() |
127 | 127 | ||
128 | class CacheHTTPHandler(urllib2.HTTPHandler): | 128 | class CacheHTTPHandler(urllib.request.HTTPHandler): |
129 | def http_open(self, req): | 129 | def http_open(self, req): |
130 | return self.do_open(HTTPConnectionCache, req) | 130 | return self.do_open(HTTPConnectionCache, req) |
131 | 131 | ||
@@ -139,7 +139,7 @@ class Wget(FetchMethod): | |||
139 | - geturl(): return the original request URL | 139 | - geturl(): return the original request URL |
140 | - code: HTTP status code | 140 | - code: HTTP status code |
141 | """ | 141 | """ |
142 | host = req.get_host() | 142 | host = req.host |
143 | if not host: | 143 | if not host: |
144 | raise urlllib2.URLError('no host given') | 144 | raise urllib.error.URLError('no host given') |
145 | 145 | ||
@@ -147,7 +147,7 @@ class Wget(FetchMethod): | |||
147 | h.set_debuglevel(self._debuglevel) | 147 | h.set_debuglevel(self._debuglevel) |
148 | 148 | ||
149 | headers = dict(req.unredirected_hdrs) | 149 | headers = dict(req.unredirected_hdrs) |
150 | headers.update(dict((k, v) for k, v in req.headers.items() | 150 | headers.update(dict((k, v) for k, v in list(req.headers.items()) |
151 | if k not in headers)) | 151 | if k not in headers)) |
152 | 152 | ||
153 | # We want to make an HTTP/1.1 request, but the addinfourl | 153 | # We want to make an HTTP/1.1 request, but the addinfourl |
@@ -164,7 +164,7 @@ class Wget(FetchMethod): | |||
164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 | 164 | headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0 |
165 | 165 | ||
166 | headers = dict( | 166 | headers = dict( |
167 | (name.title(), val) for name, val in headers.items()) | 167 | (name.title(), val) for name, val in list(headers.items())) |
168 | 168 | ||
169 | if req._tunnel_host: | 169 | if req._tunnel_host: |
170 | tunnel_headers = {} | 170 | tunnel_headers = {} |
@@ -177,12 +177,12 @@ class Wget(FetchMethod): | |||
177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) | 177 | h.set_tunnel(req._tunnel_host, headers=tunnel_headers) |
178 | 178 | ||
179 | try: | 179 | try: |
180 | h.request(req.get_method(), req.get_selector(), req.data, headers) | 180 | h.request(req.get_method(), req.selector, req.data, headers) |
181 | except socket.error, err: # XXX what error? | 181 | except socket.error as err: # XXX what error? |
182 | # Don't close connection when cache is enabled. | 182 | # Don't close connection when cache is enabled. |
183 | if fetch.connection_cache is None: | 183 | if fetch.connection_cache is None: |
184 | h.close() | 184 | h.close() |
185 | raise urllib2.URLError(err) | 185 | raise urllib.error.URLError(err) |
186 | else: | 186 | else: |
187 | try: | 187 | try: |
188 | r = h.getresponse(buffering=True) | 188 | r = h.getresponse() |
@@ -222,7 +222,7 @@ class Wget(FetchMethod): | |||
222 | 222 | ||
223 | return resp | 223 | return resp |
224 | 224 | ||
225 | class HTTPMethodFallback(urllib2.BaseHandler): | 225 | class HTTPMethodFallback(urllib.request.BaseHandler): |
226 | """ | 226 | """ |
227 | Fallback to GET if HEAD is not allowed (405 HTTP error) | 227 | Fallback to GET if HEAD is not allowed (405 HTTP error) |
228 | """ | 228 | """ |
@@ -230,11 +230,11 @@ class Wget(FetchMethod): | |||
230 | fp.read() | 230 | fp.read() |
231 | fp.close() | 231 | fp.close() |
232 | 232 | ||
233 | newheaders = dict((k,v) for k,v in req.headers.items() | 233 | newheaders = dict((k,v) for k,v in list(req.headers.items()) |
234 | if k.lower() not in ("content-length", "content-type")) | 234 | if k.lower() not in ("content-length", "content-type")) |
235 | return self.parent.open(urllib2.Request(req.get_full_url(), | 235 | return self.parent.open(urllib.request.Request(req.get_full_url(), |
236 | headers=newheaders, | 236 | headers=newheaders, |
237 | origin_req_host=req.get_origin_req_host(), | 237 | origin_req_host=req.origin_req_host, |
238 | unverifiable=True)) | 238 | unverifiable=True)) |
239 | 239 | ||
240 | """ | 240 | """ |
@@ -249,35 +249,35 @@ class Wget(FetchMethod): | |||
249 | """ | 249 | """ |
250 | http_error_406 = http_error_405 | 250 | http_error_406 = http_error_405 |
251 | 251 | ||
252 | class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler): | 252 | class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): |
253 | """ | 253 | """ |
254 | urllib2.HTTPRedirectHandler resets the method to GET on redirect, | 254 | urllib.request.HTTPRedirectHandler resets the method to GET on redirect, |
255 | when we want to follow redirects using the original method. | 255 | when we want to follow redirects using the original method. |
256 | """ | 256 | """ |
257 | def redirect_request(self, req, fp, code, msg, headers, newurl): | 257 | def redirect_request(self, req, fp, code, msg, headers, newurl): |
258 | newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) | 258 | newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) |
259 | newreq.get_method = lambda: req.get_method() | 259 | newreq.get_method = lambda: req.get_method() |
260 | return newreq | 260 | return newreq |
261 | exported_proxies = export_proxies(d) | 261 | exported_proxies = export_proxies(d) |
262 | 262 | ||
263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] | 263 | handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] |
264 | if export_proxies: | 264 | if exported_proxies: |
265 | handlers.append(urllib2.ProxyHandler()) | 265 | handlers.append(urllib.request.ProxyHandler()) |
266 | handlers.append(CacheHTTPHandler()) | 266 | handlers.append(CacheHTTPHandler()) |
267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default | 267 | # XXX: Since Python 2.7.9 ssl cert validation is enabled by default |
268 | # see PEP-0476, this causes verification errors on some https servers | 268 | # see PEP-0476, this causes verification errors on some https servers |
269 | # so disable by default. | 269 | # so disable by default. |
270 | import ssl | 270 | import ssl |
271 | if hasattr(ssl, '_create_unverified_context'): | 271 | if hasattr(ssl, '_create_unverified_context'): |
272 | handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context())) | 272 | handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) |
273 | opener = urllib2.build_opener(*handlers) | 273 | opener = urllib.request.build_opener(*handlers) |
274 | 274 | ||
275 | try: | 275 | try: |
276 | uri = ud.url.split(";")[0] | 276 | uri = ud.url.split(";")[0] |
277 | r = urllib2.Request(uri) | 277 | r = urllib.request.Request(uri) |
278 | r.get_method = lambda: "HEAD" | 278 | r.get_method = lambda: "HEAD" |
279 | opener.open(r) | 279 | opener.open(r) |
280 | except urllib2.URLError as e: | 280 | except urllib.error.URLError as e: |
281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches | 281 | # debug for now to avoid spamming the logs in e.g. remote sstate searches |
282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) | 282 | logger.debug(2, "checkstatus() urlopen failed: %s" % e) |
283 | return False | 283 | return False |