summary | refs | log | tree | commit | diff | stats
path: root/bitbake/lib/bb/fetch2
diff options
context:
space:
mode:
author    Ross Burton <ross.burton@intel.com>                      2016-01-20 13:10:25 +0000
committer Richard Purdie <richard.purdie@linuxfoundation.org>     2016-01-26 22:34:45 +0000
commit9f22898e0f9155a75c6b6f44869b129e505910e8 (patch)
tree     e118952bf5154c8dafcabe885ae9fa2bb2dfa922 /bitbake/lib/bb/fetch2
parent   d11cc29758a07b65a773765208ab3b95dcbe4bf4 (diff)
download poky-9f22898e0f9155a75c6b6f44869b129e505910e8.tar.gz
bitbake: fetch2/wget: fallback to GET if HEAD is rejected in checkstatus()
The core change here is to fall back to GET requests if HEAD is rejected in the checkstatus() method, as you can't do a HEAD on Amazon S3 (used by Github archives). This meant removing the monkey patch that the default method was GET and adding a fixed redirect handler that doesn't reset to GET. Also, change the way the opener is constructed from an if/elif cluster to a conditionally constructed list. (Bitbake rev: 6ec70d5d2e330b41b932b0a655b838a5f37df01e) Signed-off-by: Ross Burton <ross.burton@intel.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/fetch2')
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 72
1 file changed, 49 insertions(+), 23 deletions(-)
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index c8c6d5ce83..5a31730a4a 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -235,38 +235,64 @@ class Wget(FetchMethod):
235 235
236 return exported 236 return exported
237 237
238 def head_method(self): 238 class HTTPMethodFallback(urllib2.BaseHandler):
239 return "HEAD" 239 """
240 240 Fallback to GET if HEAD is not allowed (405 HTTP error)
241 """
242 def http_error_405(self, req, fp, code, msg, headers):
243 fp.read()
244 fp.close()
245
246 newheaders = dict((k,v) for k,v in req.headers.items()
247 if k.lower() not in ("content-length", "content-type"))
248 return self.parent.open(urllib2.Request(req.get_full_url(),
249 headers=newheaders,
250 origin_req_host=req.get_origin_req_host(),
251 unverifiable=True))
252
253 """
254 Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
255 Forbidden when they actually mean 405 Method Not Allowed.
256 """
257 http_error_403 = http_error_405
258
259 """
260 Some servers (e.g. FusionForge) returns 406 Not Acceptable when they
261 actually mean 405 Method Not Allowed.
262 """
263 http_error_406 = http_error_405
264
265 class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
266 """
267 urllib2.HTTPRedirectHandler resets the method to GET on redirect,
268 when we want to follow redirects using the original method.
269 """
270 def redirect_request(self, req, fp, code, msg, headers, newurl):
271 newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
272 newreq.get_method = lambda: req.get_method()
273 return newreq
241 exported_proxies = export_proxies(d) 274 exported_proxies = export_proxies(d)
242 275
276 handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
277 if export_proxies:
278 handlers.append(urllib2.ProxyHandler())
279 handlers.append(CacheHTTPHandler())
243 # XXX: Since Python 2.7.9 ssl cert validation is enabled by default 280 # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
244 # see PEP-0476, this causes verification errors on some https servers 281 # see PEP-0476, this causes verification errors on some https servers
245 # so disable by default. 282 # so disable by default.
246 import ssl 283 import ssl
247 ssl_context = None
248 if hasattr(ssl, '_create_unverified_context'): 284 if hasattr(ssl, '_create_unverified_context'):
249 ssl_context = ssl._create_unverified_context() 285 handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
250 286 opener = urllib2.build_opener(*handlers)
251 if exported_proxies == True and ssl_context is not None:
252 opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler,
253 urllib2.HTTPSHandler(context=ssl_context))
254 elif exported_proxies == False and ssl_context is not None:
255 opener = urllib2.build_opener(CacheHTTPHandler,
256 urllib2.HTTPSHandler(context=ssl_context))
257 elif exported_proxies == True and ssl_context is None:
258 opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler)
259 else:
260 opener = urllib2.build_opener(CacheHTTPHandler)
261
262 urllib2.Request.get_method = head_method
263 urllib2.install_opener(opener)
264
265 uri = ud.url.split(";")[0]
266 287
267 try: 288 try:
268 urllib2.urlopen(uri) 289 uri = ud.url.split(";")[0]
269 except: 290 r = urllib2.Request(uri)
291 r.get_method = lambda: "HEAD"
292 opener.open(r)
293 except urllib2.URLError as e:
294 # debug for now to avoid spamming the logs in e.g. remote sstate searches
295 logger.debug(2, "checkstatus() urlopen failed: %s" % e)
270 return False 296 return False
271 return True 297 return True
272 298