Bug 1390693 - Use distribution_files() for finding files to upload; r?dustin
author Gregory Szorc <gps@mozilla.com>
Thu, 24 Aug 2017 11:07:10 -0700
changeset 652432 0061aa1f8cb9675b30c7db680e1f97334391b3f1
parent 652431 ecfcfc64013751517cb1d7bedef34c6916659f31
child 652433 1b86f913f9102e3abc3d1e8ff1c020aad9a190ac
push id 76055
push user gszorc@mozilla.com
push date Thu, 24 Aug 2017 20:42:35 +0000
reviewers dustin
bugs 1390693
milestone 57.0a1
Bug 1390693 - Use distribution_files() for finding files to upload; r?dustin

Instead of doing the file finding inside s3_upload(), the function now takes
the output of distribution_files(). The new code is much simpler.

MozReview-Commit-ID: 43i2Alvyu5i
tools/docs/mach_commands.py
tools/docs/moztreedocs/upload.py
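
The patch consumes moztreedocs.distribution_files(), whose implementation is
not part of this diff. Judging from the loop added to s3_upload() below, it
yields (relative path, file object) pairs for everything under the generated
docs root. A minimal sketch of such a helper, assuming it is built on
mozpack's FileFinder (the actual implementation may differ), could look like:

# Hypothetical sketch; the real distribution_files() lives in
# tools/docs/moztreedocs/__init__.py and is not shown in this patch.
from mozpack.files import FileFinder


def distribution_files(root):
    # FileFinder.find() yields (relative path, file object) pairs, which is
    # exactly the shape the reworked s3_upload() iterates over.
    finder = FileFinder(root)
    return finder.find('**')
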
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -130,16 +130,19 @@ class Documentation(MachCommandBase):
         help='Path(s) to documentation to build and upload.')
     def upload_docs(self, what=None):
         self._activate_virtualenv()
         self.virtualenv_manager.install_pip_package('boto3==1.4.4')
 
         outdir = os.path.join(self.topobjdir, 'docs')
         self.build_docs(what=what, outdir=outdir, format='html')
 
+        from moztreedocs import distribution_files
         from moztreedocs.upload import s3_upload
-        s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'))
+        files = distribution_files(os.path.join(outdir, 'html',
+                                                'Mozilla_Source_Tree_Docs'))
+        s3_upload(files)
 
 
 def die(msg, exit_code=1):
     msg = '%s: %s' % (sys.argv[0], msg)
     print(msg, file=sys.stderr)
     return exit_code
--- a/tools/docs/moztreedocs/upload.py
+++ b/tools/docs/moztreedocs/upload.py
@@ -1,22 +1,23 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, unicode_literals
 
+import io
 import mimetypes
 import os
 
 import boto3
 import requests
 
 
-def s3_upload(root):
+def s3_upload(files):
     region = 'us-west-2'
     level = os.environ.get('MOZ_SCM_LEVEL', '1')
     bucket = {
         '1': 'gecko-docs.mozilla.org-l1',
         '2': 'gecko-docs.mozilla.org-l2',
         '3': 'gecko-docs.mozilla.org',
     }[level]
     secrets_url = 'http://taskcluster/secrets/v1/secret/'
@@ -35,31 +36,20 @@ def s3_upload(root):
             aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
             aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
             region_name=region)
     else:
         print("Trying to use your AWS credentials..")
         session = boto3.session.Session(region_name=region)
     s3 = session.client('s3')
 
-    try:
-        old_cwd = os.getcwd()
-        os.chdir(root)
-
-        for dir, dirs, filenames in os.walk('.'):
-            if dir == '.':
-                # ignore a few things things in the root directory
-                bad = [d for d in dirs if
-                       d.startswith('.') or d in ('_venv', '_staging')]
-                for b in bad:
-                    dirs.remove(b)
-            for filename in filenames:
-                pathname = os.path.join(dir, filename)[2:]  # strip '.''
-                content_type, content_encoding = mimetypes.guess_type(pathname)
-                extra_args = {}
-                if content_type:
-                    extra_args['ContentType'] = content_type
-                if content_encoding:
-                    extra_args['ContentEncoding'] = content_encoding
-                print('uploading', pathname)
-                s3.upload_file(pathname, bucket, pathname, ExtraArgs=extra_args)
-    finally:
-        os.chdir(old_cwd)
+    for path, f in files:
+        content_type, content_encoding = mimetypes.guess_type(path)
+        extra_args = {}
+        if content_type:
+            extra_args['ContentType'] = content_type
+        if content_encoding:
+            extra_args['ContentEncoding'] = content_encoding
+        print('uploading', path)
+        # The file types returned by mozpack behave like file objects. But they
+        # don't accept an argument to read(). So we wrap in a BytesIO.
+        s3.upload_fileobj(io.BytesIO(f.read()), bucket, path,
+                          ExtraArgs=extra_args)
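
With this change s3_upload() no longer cares where the files come from: any
iterable of (S3 key, file-like object) pairs works, since the function only
calls read() on each object before handing the bytes to boto3's
upload_fileobj(). A hypothetical standalone invocation (the paths and
contents here are made up for illustration; in the mach command the pairs
come from distribution_files()):

import io

from moztreedocs.upload import s3_upload

# Hypothetical example data standing in for distribution_files() output.
files = [
    ('index.html', io.BytesIO(b'<html><body>docs</body></html>')),
    ('styles/main.css', io.BytesIO(b'body { margin: 0; }')),
]
s3_upload(files)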