Bug 1306189 - Create upload dir before writing to it + more logging. r=dustin draft
authorArmen Zambrano Gasparnian <armenzg@mozilla.com>
Thu, 29 Sep 2016 11:08:27 -0400
changeset 419028 ada7525fdc64580825e10bd9a0ebf6c69578f974
parent 418992 f7d5008ee2ab9200052e45ad6ecc3f3a348f7f86
child 532475 b2e118d6f7b251be3167a1adad407628ac7436ce
push id: 30826
push user: armenzg@mozilla.com
push date: Thu, 29 Sep 2016 15:14:21 +0000
reviewers: dustin
bugs: 1306189
milestone52.0a1
Bug 1306189 - Create upload dir before writing to it + more logging. r=dustin MozReview-Commit-ID: EBsIFuXN3E7
testing/mozharness/mozharness/base/script.py
--- a/testing/mozharness/mozharness/base/script.py
+++ b/testing/mozharness/mozharness/base/script.py
@@ -388,18 +388,17 @@ class ScriptMixin(PlatformMixin):
         # Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
         # Bug 1301807 - BadStatusLine: ''
         response = urllib2.urlopen(request)
 
         if parsed_url.scheme in ('http', 'https'):
             expected_file_size = int(response.headers.get('Content-Length'))
 
         self.info('Http code: {}'.format(response.getcode()))
-        for k in ('Content-Encoding', 'Content-Type', 'via', 'x-amz-cf-id',
-                  'x-amz-version-id', 'x-cache'):
+        for k in sorted(response.headers.keys()):
             self.info('{}: {}'.format(k, response.headers.get(k)))
 
         file_contents = response.read()
         obtained_file_size = len(file_contents)
         self.info('Expected file size: {}'.format(expected_file_size))
         self.info('Obtained file size: {}'.format(obtained_file_size))
 
         if obtained_file_size != expected_file_size:
@@ -684,20 +683,26 @@ class ScriptMixin(PlatformMixin):
         # 2) We're guaranteed to have download the file with error_level=FATAL
         #    Let's unpack the file
         function, kwargs = _determine_extraction_method_and_kwargs(url)
         try:
             function(**kwargs)
         except zipfile.BadZipfile:
             # Bug 1305752 - Sometimes a good download turns out to be a
             # corrupted zipfile. Let's upload the file for inspection
-            filepath = os.path.join(self.query_abs_dirs()['abs_upload_dir'], url.split('/')[-1])
+            upload_dir = self.query_abs_dirs()['abs_upload_dir']
+            self.mkdir_p(upload_dir)
+            filepath = os.path.join(upload_dir, url.split('/')[-1])
+            contents = compressed_file.read()
             self.info('Storing corrupted file to {}'.format(filepath))
             with open(filepath, 'w') as f:
-                f.write(compressed_file.read())
+                f.write(contents)
+
+            self.run_command(command=['du', filepath])
+            self.run_command(command=['file', filepath])
 
             # Dump the exception and exit
             self.exception(level=FATAL)
 
 
     def load_json_url(self, url, error_level=None, *args, **kwargs):
         """ Returns a json object from a url (it retries). """
         contents = self._retry_download(