From c8f64501ad9198572d0a7815928068989ccff556 Mon Sep 17 00:00:00 2001
From: Patrick Ohly
Date: Mon, 17 Jul 2017 15:25:10 +0200
Subject: bitbake: fetch2/wget.py: improve error handling during sstate check

When the sstate is accessed via HTTP, the existence check can fail due
to network issues, in which case bitbake silently continues without
sstate.

One such network issue is an HTTP server like Python's own SimpleHTTP
which closes the TCP connection despite an explicit "Keep-Alive" in the
HTTP request header. The server does that without a "close" in the HTTP
response header, so the socket remains in the connection cache, leading
to "urlopen failed: " (only visible in "bitbake -D -D" output) when
trying to use the cached connection again.

The connection might also get closed for other reasons (proxy,
timeouts, etc.), so this is something that the client should be able to
handle. This is achieved by checking for the error, removing the bad
connection, and letting the check_status() method try again with a new
connection.

It is necessary to let the second attempt fail permanently, because bad
proxy setups have been observed to also lead to such broken
connections. In that case, we need to abort for real after trying
twice, otherwise a build would just hang forever.

[YOCTO #11782]

(Bitbake rev: 6fa07752bbd3ac345cd8617da49a70e0b2dd565f)

Signed-off-by: Patrick Ohly
Signed-off-by: Richard Purdie
---
 bitbake/lib/bb/fetch2/wget.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index ae0ffa8c97..208ee9bdd6 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -30,6 +30,7 @@ import tempfile
 import subprocess
 import os
 import logging
+import errno
 import bb
 import bb.progress
 import urllib.request, urllib.parse, urllib.error
@@ -206,8 +207,21 @@ class Wget(FetchMethod):
                         h.request(req.get_method(), req.selector, req.data, headers)
                     except socket.error as err: # XXX what error?
                         # Don't close connection when cache is enabled.
+                        # Instead, try to detect connections that are no longer
+                        # usable (for example, closed unexpectedly) and remove
+                        # them from the cache.
                         if fetch.connection_cache is None:
                             h.close()
+                        elif isinstance(err, OSError) and err.errno == errno.EBADF:
+                            # This happens when the server closes the connection despite the Keep-Alive.
+                            # Apparently urllib then uses the file descriptor, expecting it to be
+                            # connected, when in reality the connection is already gone.
+                            # We let the request fail and expect it to be
+                            # tried once more ("try_again" in check_status()),
+                            # with the dead connection removed from the cache.
+                            # If it still fails, we give up, which can happen with bad
+                            # HTTP proxy settings.
+                            fetch.connection_cache.remove_connection(h.host, h.port)
                         raise urllib.error.URLError(err)
                     else:
                         try:
-- 
cgit v1.2.3-54-g00ecf
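
Reviewer note: the evict-and-retry pattern described in the commit message can be
illustrated with a minimal, self-contained sketch. The names below (ConnectionCache,
check_status, the placeholder host and path) are illustrative only and are not the
bitbake fetcher API; the real logic lives in bitbake/lib/bb/fetch2/wget.py as shown
in the diff above.

    # Sketch only: a tiny keep-alive connection cache plus a check that retries
    # exactly once after evicting a connection the server has already closed.
    import errno
    import http.client


    class ConnectionCache:
        """Tiny keep-alive connection cache keyed by (host, port)."""

        def __init__(self):
            self._cache = {}

        def get_connection(self, host, port):
            key = (host, port)
            if key not in self._cache:
                self._cache[key] = http.client.HTTPConnection(host, port, timeout=30)
            return self._cache[key]

        def remove_connection(self, host, port):
            conn = self._cache.pop((host, port), None)
            if conn is not None:
                conn.close()


    def check_status(cache, host, port, path, try_again=True):
        """Return True if HEAD <path> succeeds; retry once if a cached connection is dead."""
        conn = cache.get_connection(host, port)
        try:
            conn.request("HEAD", path)
            resp = conn.getresponse()
            resp.read()  # drain the (empty) body so the connection stays reusable
            return resp.status < 400
        except OSError as err:  # socket.error is an alias of OSError on Python 3
            if err.errno == errno.EBADF:
                # The server dropped the keep-alive connection; evict it so the
                # retry below opens a fresh socket instead of reusing a dead one.
                cache.remove_connection(host, port)
            if try_again:
                return check_status(cache, host, port, path, try_again=False)
            # Second failure in a row: give up for real (e.g. a broken proxy setup).
            raise


    # Example usage (host and path are placeholders):
    # ok = check_status(ConnectionCache(), "sstate.example.org", 80, "/sstate-cache/x.siginfo")

Retrying exactly once mirrors the rationale in the commit message: a single transient
keep-alive drop should not make bitbake silently skip sstate, but a setup where every
connection dies (for example a bad proxy) must still fail quickly instead of hanging
the build.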