Bug 1390693 - Upload docs to project and version specific locations; r?dustin draft
authorGregory Szorc <gps@mozilla.com>
Thu, 24 Aug 2017 11:12:21 -0700
changeset 652434 44548d7131582b817723fb040e4a6819d14096fc
parent 652433 1b86f913f9102e3abc3d1e8ff1c020aad9a190ac
child 652435 894334de4a9ef5bc67b65e164fd31690cbad4b44
push id76055
push usergszorc@mozilla.com
push dateThu, 24 Aug 2017 20:42:35 +0000
reviewersdustin
bugs1390693
milestone57.0a1
Bug 1390693 - Upload docs to project and version specific locations; r?dustin

Previously, we uploaded the main Firefox tree docs to /. In reality,
there are multiple Sphinx projects in the repo. In addition, it is
sometimes desirable to access docs for an older version of Firefox.

In this commit, we add support for specifying the S3 key prefix for
uploads. Then we change the upload code to upload to multiple locations:

* <project>/latest (always)
* <project>/<version> (if a version is defined in the Sphinx config)
* / (for the main Sphinx docs project)

For the Firefox docs, ``version`` corresponds to a sanitized value from
``milestone.txt``. Currently, it resolves to ``57.0``.

While we're here, we add support for declaring an alternate project name
in the Sphinx conf.py file. If ``moz_project_name`` is defined, we use
that as the project name. For Firefox, we set it to ``main``. This means
our paths (local and uploaded) are now ``main`` instead of
``Mozilla_Source_Tree_Docs``. That's much more pleasant.

MozReview-Commit-ID: 8Gl6l2m6uU4
taskcluster/ci/source-test/doc.yml
tools/docs/conf.py
tools/docs/mach_commands.py
tools/docs/moztreedocs/upload.py
--- a/taskcluster/ci/source-test/doc.yml
+++ b/taskcluster/ci/source-test/doc.yml
@@ -7,23 +7,22 @@ doc-generate:
         tier: 1
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
         artifacts:
             - type: file
               name: public/docs.tar.gz
-              path: /home/worker/checkouts/gecko/docs.tar.gz
+              path: /home/worker/checkouts/gecko/docs-out/main.tar.gz
     run:
         using: run-task
         command: >
             cd /home/worker/checkouts/gecko &&
-            ./mach doc --outdir docs-out --no-open --archive &&
-            mv docs-out/Mozilla_Source_Tree_Docs.tar.gz docs.tar.gz
+            ./mach doc --outdir docs-out --no-open --archive
     when:
         files-changed:
             - '**/*.py'
             - '**/*.rst'
             - 'tools/docs/**'
 
 doc-upload:
     description: Generate and upload the Sphinx documentation
--- a/tools/docs/conf.py
+++ b/tools/docs/conf.py
@@ -76,8 +76,10 @@ else:
     # environment handles this otherwise.
     import sphinx_rtd_theme
     html_theme = 'sphinx_rtd_theme'
     html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
 
 
 html_static_path = ['_static']
 htmlhelp_basename = 'MozillaTreeDocs'
+
+moz_project_name = 'main'
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -61,40 +61,39 @@ class Documentation(MachCommandBase):
         for path in what:
             path = os.path.normpath(os.path.abspath(path))
             docdir = self._find_doc_dir(path)
 
             if not docdir:
                 failed.append((path, 'could not find docs at this location'))
                 continue
 
-            # find project name to use as a namespace within `outdir`
-            project = self._find_project_name(docdir)
-            savedir = os.path.join(format_outdir, project)
+            props = self._project_properties(docdir)
+            savedir = os.path.join(format_outdir, props['project'])
 
             args = [
                 'sphinx',
                 '-b', format,
                 docdir,
                 savedir,
             ]
             result = sphinx.build_main(args)
             if result != 0:
                 failed.append((path, 'sphinx return code %d' % result))
             else:
                 generated.append(savedir)
 
             if archive:
                 archive_path = os.path.join(outdir,
-                                            '%s.tar.gz' %  project)
+                                            '%s.tar.gz' % props['project'])
                 moztreedocs.create_tarball(archive_path, savedir)
                 print('Archived to %s' % archive_path)
 
             if upload:
-                self._s3_upload(savedir)
+                self._s3_upload(savedir, props['project'], props['version'])
 
             index_path = os.path.join(savedir, 'index.html')
             if not http and auto_open and os.path.isfile(index_path):
                 webbrowser.open(index_path)
 
         if generated:
             print('\nGenerated documentation:\n%s\n' % '\n'.join(generated))
 
@@ -108,37 +107,63 @@ class Documentation(MachCommandBase):
             if len(addr) != 2:
                 return die('invalid address: %s' % http)
 
             httpd = mozhttpd.MozHttpd(host=addr[0], port=addr[1],
                                       docroot=format_outdir)
             print('listening on %s:%d' % addr)
             httpd.start(block=True)
 
-    def _find_project_name(self, path):
+    def _project_properties(self, path):
         import imp
         path = os.path.join(path, 'conf.py')
         with open(path, 'r') as fh:
             conf = imp.load_module('doc_conf', fh, path,
                                    ('.py', 'r', imp.PY_SOURCE))
 
-        return conf.project.replace(' ', '_')
+        # Prefer the Mozilla project name, falling back to Sphinx's
+        # default variable if it isn't defined.
+        project = getattr(conf, 'moz_project_name', None)
+        if not project:
+            project = conf.project.replace(' ', '_')
+
+        return {
+            'project': project,
+            'version': getattr(conf, 'version', None)
+        }
 
     def _find_doc_dir(self, path):
         search_dirs = ('doc', 'docs')
         for d in search_dirs:
             p = os.path.join(path, d)
             if os.path.isfile(os.path.join(p, 'conf.py')):
                 return p
 
-    def _s3_upload(self, root):
+    def _s3_upload(self, root, project, version=None):
         self.virtualenv_manager.install_pip_package('boto3==1.4.4')
 
         from moztreedocs import distribution_files
         from moztreedocs.upload import s3_upload
-        files = distribution_files(root)
-        s3_upload(files)
+
+        # Files are uploaded to multiple locations:
+        #
+        # <project>/latest
+        # <project>/<version>
+        #
+        # This allows multiple projects and versions to be stored in the
+        # S3 bucket.
+
+        files = list(distribution_files(root))
+
+        s3_upload(files, key_prefix='%s/latest' % project)
+        if version:
+            s3_upload(files, key_prefix='%s/%s' % (project, version))
+
+        # Until we redirect / to main/latest, upload the main docs
+        # to the root.
+        if project == 'main':
+            s3_upload(files)
 
 
 def die(msg, exit_code=1):
     msg = '%s: %s' % (sys.argv[0], msg)
     print(msg, file=sys.stderr)
     return exit_code
--- a/tools/docs/moztreedocs/upload.py
+++ b/tools/docs/moztreedocs/upload.py
@@ -7,17 +7,25 @@ from __future__ import absolute_import, 
 import io
 import mimetypes
 import os
 
 import boto3
 import requests
 
 
-def s3_upload(files):
+def s3_upload(files, key_prefix=None):
+    """Upload files to an S3 bucket.
+
+    ``files`` is an iterable of ``(path, BaseFile)`` (typically from a
+    mozpack Finder).
+
+    Keys in the bucket correspond to source filenames. If ``key_prefix`` is
+    defined, key names will be ``<key_prefix>/<path>``.
+    """
     region = 'us-west-2'
     level = os.environ.get('MOZ_SCM_LEVEL', '1')
     bucket = {
         '1': 'gecko-docs.mozilla.org-l1',
         '2': 'gecko-docs.mozilla.org-l2',
         '3': 'gecko-docs.mozilla.org',
     }[level]
     secrets_url = 'http://taskcluster/secrets/v1/secret/'
@@ -43,13 +51,20 @@ def s3_upload(files):
 
     for path, f in files:
         content_type, content_encoding = mimetypes.guess_type(path)
         extra_args = {}
         if content_type:
             extra_args['ContentType'] = content_type
         if content_encoding:
             extra_args['ContentEncoding'] = content_encoding
-        print('uploading', path)
+
+        if key_prefix:
+            key = '%s/%s' % (key_prefix, path)
+        else:
+            key = path
+
+        print('uploading %s to %s' % (path, key))
+
         # The file types returned by mozpack behave like file objects. But they
         # don't accept an argument to read(). So we wrap in a BytesIO.
-        s3.upload_fileobj(io.BytesIO(f.read()), bucket, path,
+        s3.upload_fileobj(io.BytesIO(f.read()), bucket, key,
                           ExtraArgs=extra_args)