Bug 1309912 - Add explicit timeout for urllib2.urlopen() instead of relying on global timeout
author      Armen Zambrano Gasparnian <armenzg@mozilla.com>
date        Thu, 13 Oct 2016 11:46:51 -0400
changeset   424837 71f8c581fb886711f8691ac34da065e0cfffe0ae
parent      424813 99c64f6f475b87e9cb22edaa530d938884030068
child       533774 bead8d45818b115e19e4a34f9315d85f57d856c8
push id     32266
push user   armenzg@mozilla.com
push date   Thu, 13 Oct 2016 15:47:30 +0000
bugs        1309912
milestone   52.0a1
Bug 1309912 - Add explicit timeout for urllib2.urlopen() instead of relying on global timeout

In Mozharness' download-and-extract action we sometimes block indefinitely while reading the contents of a URL; this mostly happens on Mac machines. urllib2.urlopen()'s documentation mentions a global timeout, but it does not seem to kick in, or perhaps it does not do what we expect. Passing an explicit timeout makes the read raise instead of hanging forever.

MozReview-Commit-ID: L9IDnDSjgzO
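For context, a minimal sketch of the difference between the two approaches, assuming Python 2's urllib2 (the URL and the 30-second value are illustrative):

    import socket
    import urllib2

    # Global timeout: only applies if something in the process has called
    # socket.setdefaulttimeout(); otherwise sockets can block indefinitely.
    socket.setdefaulttimeout(30)
    response = urllib2.urlopen('https://example.com/artifact')

    # Explicit per-call timeout: set on the underlying socket, so it covers
    # the connection and each blocking read; socket.timeout is raised if no
    # data arrives within 30 seconds.
    response = urllib2.urlopen('https://example.com/artifact', timeout=30)
    try:
        body = response.read()
    except socket.timeout:
        raise  # the caller can now retry instead of hanging

The explicit timeout is preferable because it does not depend on process-wide state that other code may or may not have set.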
testing/mozharness/mozharness/base/script.py
--- a/testing/mozharness/mozharness/base/script.py
+++ b/testing/mozharness/mozharness/base/script.py
@@ -372,27 +372,29 @@ class ScriptMixin(PlatformMixin):
             expected_file_size = os.stat(url.replace('file://', '')).st_size
 
             # In case we're referencing a file without file://
             if parsed_url.scheme == '':
                 url = 'file://%s' % os.path.abspath(url)
                 parsed_url = urlparse.urlparse(url)
 
         request = urllib2.Request(url)
-        # Exceptions to be retried:
-        # Bug 1300663 - HTTPError: HTTP Error 404: Not Found
-        # Bug 1300413 - HTTPError: HTTP Error 500: Internal Server Error
-        # Bug 1300943 - HTTPError: HTTP Error 503: Service Unavailable
-        # Bug 1300953 - URLError: <urlopen error [Errno -2] Name or service not known>
-        # Bug 1301594 - URLError: <urlopen error [Errno 10054] An existing connection was ...
-        # Bug 1301597 - URLError: <urlopen error [Errno 8] _ssl.c:504: EOF occurred in ...
-        # Bug 1301855 - URLError: <urlopen error [Errno 60] Operation timed out>
-        # Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
-        # Bug 1301807 - BadStatusLine: ''
-        response = urllib2.urlopen(request)
+        # Callers of fetch_url_into_memory() should retry when one of these exceptions is raised:
+        # * Bug 1300663 - HTTPError: HTTP Error 404: Not Found
+        # * Bug 1300413 - HTTPError: HTTP Error 500: Internal Server Error
+        # * Bug 1300943 - HTTPError: HTTP Error 503: Service Unavailable
+        # * Bug 1300953 - URLError: <urlopen error [Errno -2] Name or service not known>
+        # * Bug 1301594 - URLError: <urlopen error [Errno 10054] An existing connection was ...
+        # * Bug 1301597 - URLError: <urlopen error [Errno 8] _ssl.c:504: EOF occurred in ...
+        # * Bug 1301855 - URLError: <urlopen error [Errno 60] Operation timed out>
+        # * Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
+        # * Bug 1301807 - BadStatusLine: ''
+        #
+        # Bug 1309912 - Add a timeout in hopes of fixing the blocking on response.read() (bug 1300413)
+        response = urllib2.urlopen(request, timeout=30)
 
         if parsed_url.scheme in ('http', 'https'):
             expected_file_size = int(response.headers.get('Content-Length'))
 
         self.info('Http code: {}'.format(response.getcode()))
         for k in sorted(response.headers.keys()):
             if k.lower().startswith('x-amz-') or k in ('Content-Encoding', 'Content-Type', 'via'):
                 self.info('{}: {}'.format(k, response.headers.get(k)))