Bug 1306189 - Add proper signature + better logging + stop trying to upload the corrupted file. r=dustin draft
author Armen Zambrano Gasparnian <armenzg@mozilla.com>
Thu, 29 Sep 2016 11:08:27 -0400
changeset 419175 827be8246ca53008bd1de4afc08a3e1b9e0bc2b1
parent 418992 f7d5008ee2ab9200052e45ad6ecc3f3a348f7f86
child 532518 7eb87e4a05c109a30c3f717ca03b38d3cbda6148
push id 30870
push user armenzg@mozilla.com
push date Thu, 29 Sep 2016 21:00:54 +0000
reviewers dustin
bugs 1306189
milestone 52.0a1
Bug 1306189 - Add proper signature + better logging + stop trying to upload the corrupted file. r=dustin MozReview-Commit-ID: EBsIFuXN3E7
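The "proper signature" half of the patch replaces self.exception(level=FATAL), which dumps a traceback, with a single self.fatal() call carrying a fixed message, so the failure produces one consistent log line that is easy to match (see the comment in the second hunk below). A minimal, standalone sketch of that idea, using plain logging rather than the mozharness LogMixin; the helper name and log text are illustrative only:

import logging
import sys
import zipfile

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

def unpack_or_die(path):
    # Hypothetical helper, not part of mozharness: extract a zip and, on a
    # corrupted archive, emit one fixed, greppable line instead of a traceback
    # whose contents vary from run to run.
    try:
        with zipfile.ZipFile(path) as zf:
            zf.extractall()
    except zipfile.BadZipfile:
        log.critical('Check bug 1306189 for details on downloading a truncated zip file.')
        sys.exit(-1)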
testing/mozharness/mozharness/base/script.py
--- a/testing/mozharness/mozharness/base/script.py
+++ b/testing/mozharness/mozharness/base/script.py
@@ -388,19 +388,19 @@ class ScriptMixin(PlatformMixin):
         # Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
         # Bug 1301807 - BadStatusLine: ''
         response = urllib2.urlopen(request)
 
         if parsed_url.scheme in ('http', 'https'):
             expected_file_size = int(response.headers.get('Content-Length'))
 
         self.info('Http code: {}'.format(response.getcode()))
-        for k in ('Content-Encoding', 'Content-Type', 'via', 'x-amz-cf-id',
-                  'x-amz-version-id', 'x-cache'):
-            self.info('{}: {}'.format(k, response.headers.get(k)))
+        for k in sorted(response.headers.keys()):
+            if k.lower().startswith('x-amz-') or k.lower() in ('content-encoding', 'content-type', 'via'):
+                self.info('{}: {}'.format(k, response.headers.get(k)))
 
         file_contents = response.read()
         obtained_file_size = len(file_contents)
         self.info('Expected file size: {}'.format(expected_file_size))
         self.info('Obtained file size: {}'.format(obtained_file_size))
 
         if obtained_file_size != expected_file_size:
             raise FetchedIncorrectFilesize(
@@ -682,25 +682,19 @@ class ScriptMixin(PlatformMixin):
         )
 
         # 2) We're guaranteed to have downloaded the file with error_level=FATAL
         #    Let's unpack the file
         function, kwargs = _determine_extraction_method_and_kwargs(url)
         try:
             function(**kwargs)
         except zipfile.BadZipfile:
-            # Bug 1305752 - Sometimes a good download turns out to be a
-            # corrupted zipfile. Let's upload the file for inspection
-            filepath = os.path.join(self.query_abs_dirs()['abs_upload_dir'], url.split('/')[-1])
-            self.info('Storing corrupted file to {}'.format(filepath))
-            with open(filepath, 'w') as f:
-                f.write(compressed_file.read())
-
-            # Dump the exception and exit
-            self.exception(level=FATAL)
+            # Bug 1306189 - Sometimes a good download turns out to be a
+            # corrupted zipfile. Let's create a signature that is easy to match
+            self.fatal('Check bug 1306189 for details on downloading a truncated zip file.')
 
 
     def load_json_url(self, url, error_level=None, *args, **kwargs):
         """ Returns a json object from a url (it retries). """
         contents = self._retry_download(
             url=url, error_level=error_level, *args, **kwargs
         )
         return json.loads(contents.read())
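
For reference, a minimal standalone sketch of what the new header-logging loop would emit for a CloudFront-style response; the header names and values are made up:

# Hypothetical header values for illustration; urllib2's Python 2
# response.headers (rfc822.Message) reports key names in lower case, hence
# the case-insensitive membership test in the first hunk above.
headers = {
    'content-encoding': 'gzip',
    'content-length': '52428800',
    'content-type': 'application/zip',
    'via': '1.1 example.cloudfront.net (CloudFront)',
    'x-amz-cf-id': 'example-cloudfront-request-id',
    'x-amz-version-id': 'example-object-version-id',
    'x-cache': 'Miss from cloudfront',
}

for k in sorted(headers.keys()):
    if k.lower().startswith('x-amz-') or k.lower() in ('content-encoding', 'content-type', 'via'):
        print('{}: {}'.format(k, headers.get(k)))

# Prints content-encoding, content-type, via and both x-amz-* headers;
# content-length and x-cache are filtered out.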