Bug 1234913 - Post: Unify trees and known_trees; remove explicit trees. r?chmanchester (draft)
author Nick Alexander <nalexander@mozilla.com>
date Wed, 24 Feb 2016 23:47:06 -0800
changeset 334427 2971b167c5bcef98ae7b6fdbd6f1c583f4c6f6e5
parent 334426 6f2a6fb3e295d0178011a769e8e999f7f9fd1b61
child 514912 1eb36b555b1c87696652e28480766b2acf65c787
push id 11551
push user nalexander@mozilla.com
push date Thu, 25 Feb 2016 07:47:43 +0000
reviewers chmanchester
bugs 1234913
milestone 47.0a1
Bug 1234913 - Post: Unify trees and known_trees; remove explicit trees. r?chmanchester

Just a little clean-up and simplification.

MozReview-Commit-ID: 34lNJQUuHN4
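The substance of the change is the pushhead contract between the
_find_*_pushheads generators and find_pushhead_artifacts: the generators
used to yield (hg_hash, tree-set) pairs whose tree-set was always empty,
and the consumer intersected it with the trees known to the tree cache;
now the generators yield bare hashes and the consumer asks the tree cache
directly. A minimal before/after sketch of that contract (toy code, not
the actual mozbuild classes):

    # Before: each candidate hash carried an always-empty tree tuple,
    # because no local pushlog data is consulted.
    def find_pushheads_old(hashes):
        trees = tuple()
        for hg_hash in hashes:
            yield (hg_hash, trees)

    # After: generators yield bare hashes; the tree lookup moves into
    # find_pushhead_artifacts, which accepts any tree that has artifacts
    # for the pushhead.
    def find_pushheads_new(hashes):
        for hg_hash in hashes:
            yield hg_hash

Collapsing the pair into a bare hash is also what lets
install_from_hg_revset pass [revision] below instead of building a
one-element list of (revision, tuple()) pairs.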
python/mozbuild/mozbuild/artifacts.py
--- a/python/mozbuild/mozbuild/artifacts.py
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -736,54 +736,45 @@ class Artifacts(object):
             return 'win64' if target_64bit else 'win32'
         if buildconfig.defines.get('XP_MACOSX', False):
             # We only produce unified builds in automation, so the target_cpu
             # check is not relevant.
             return 'macosx64'
         raise Exception('Cannot determine default job for |mach artifact|!')
 
     def _find_hg_pushheads(self):
-        """Return an iterator of (hg_hash, {tree-set}) associating hg revision
-        hashes that might be pushheads with the trees they are known
-        to be in.
+        """Return an iterator of hg revision hashes that might be pushheads.
 
         More recent hashes come earlier in the list.
         """
 
         hg_hash_list = subprocess.check_output([
             self._hg, 'log',
             '--template', '{node}\n',
             '-r', 'last(public() and ::., {num})'.format(
                 num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
         ])
 
-        # We are not using any local pushlog data, so we never know there is a
-        # push (resulting in any kind of job) corresponding to any trees.
-        trees = tuple()
         count = 0
         for hg_hash in hg_hash_list.splitlines():
             hg_hash = hg_hash.strip()
             if not hg_hash:
                 continue
             count += 1
-            yield (hg_hash, trees)
+            yield hg_hash
 
         if not count:
             raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n\n'
                             'See https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
                                 num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
 
     def _find_git_pushheads(self, rev):
-        """Return an iterator of (hg_hash, {tree-set}) associating hg revision
-        hashes that might be pushheads with the trees they are known
-        to be in.
+        """Return an iterator of hg revision hashes that might be pushheads.
 
-        More recent hashes should come earlier in the list.  It's okay
-        for tree-set to be the empty set {}; in that case, we'll query
-        the TaskCluster Index to determine the tree-set.
+        More recent hashes come earlier in the list.
         """
 
         import which
         cinnabar = which.which('git-cinnabar')
 
         # First commit is HEAD, next is HEAD~1, etc.
         rev_list = subprocess.check_output([
             self._git, 'rev-list', '--ancestry-path',
@@ -792,47 +783,40 @@ class Artifacts(object):
 
         hg_hash_list = subprocess.check_output([
             self._python, cinnabar, 'git2hg',
         ] + rev_list.splitlines())
 
         zeroes = "0" * 40
         self._index = taskcluster.Index()
 
-        # We don't have pushlog data, so we never know there is a push
-        # (resulting in any kind of job) corresponding to any trees.
-        trees = tuple()
         count = 0
         for hg_hash in hg_hash_list.splitlines():
             hg_hash = hg_hash.strip()
             if not hg_hash or hg_hash == zeroes:
                 continue
             count += 1
-            yield (hg_hash, trees)
+            yield hg_hash
 
         if not count:
             raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n\n'
                             'See https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
                                 num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
 
-    def find_pushhead_artifacts(self, task_cache, tree_cache, job, pushhead, trees):
-        known_trees = set(tree_cache.artifact_trees(pushhead, trees))
-        if not known_trees:
+    def find_pushhead_artifacts(self, task_cache, tree_cache, job, pushhead):
+        # Accept artifacts from any tree where they are available.
+        trees = list(tree_cache.artifact_trees(pushhead, tuple()))
+        if not trees:
             return None
-        if not trees:
-            # Accept artifacts from any tree where they are available.
-            trees = list(known_trees)
-            trees.sort()
+        trees.sort()
 
         # If we ever find a rev that's a pushhead on multiple trees, we want
         # the most recent one.
         for tree in reversed(trees):
             tree = self._tree_replacements.get(tree) or tree
-            if tree not in known_trees:
-                continue
             try:
                 urls = task_cache.artifact_urls(tree, job, pushhead)
             except ValueError:
                 continue
             if urls:
                 self.log(logging.INFO, 'artifact',
                          {'pushhead': pushhead,
                           'tree': tree},
@@ -902,23 +886,22 @@ class Artifacts(object):
-        and tree-sets they are known to be in, trying to download and
-        install from each.
+        trying to download and install from each.
         """
 
         urls = None
         count = 0
         # with blocks handle persistence.
         with self._task_cache as task_cache, self._tree_cache as tree_cache:
-            for hg_hash, trees in hg_pushheads:
+            for hg_hash in hg_pushheads:
                 count += 1
                 self.log(logging.DEBUG, 'artifact',
                          {'hg_hash': hg_hash},
                          'Trying to find artifacts for hg revision {hg_hash}.')
                 urls = self.find_pushhead_artifacts(task_cache, tree_cache,
-                                                    self._job, hg_hash, trees)
+                                                    self._job, hg_hash)
                 if urls:
                     for url in urls:
                         if self.install_from_url(url, distdir):
                             return 1
                     return 0
 
         self.log(logging.ERROR, 'artifact',
                  {'count': count},
@@ -933,23 +917,22 @@ class Artifacts(object):
         hg_pushheads = self._find_git_pushheads('HEAD')
         return self._install_from_hg_pushheads(hg_pushheads, distdir)
 
     def install_from_hg_revset(self, revset, distdir):
         revision = subprocess.check_output([self._hg, 'log', '--template', '{node}\n',
                                             '-r', revset]).strip()
         if len(revision.split('\n')) != 1:
             raise ValueError('hg revision specification must resolve to exactly one commit')
-        hg_pushheads = [(revision, tuple())]
         self.log(logging.INFO, 'artifact',
                  {'revset': revset,
                   'revision': revision},
                  'Will only accept artifacts from a pushhead at {revision} '
                  '(matched revset "{revset}").')
-        return self._install_from_hg_pushheads(hg_pushheads, distdir)
+        return self._install_from_hg_pushheads([revision], distdir)
 
     def install_from(self, source, distdir):
         """Install artifacts from a ``source`` into the given ``distdir``.
         """
         if source and os.path.isfile(source):
             return self.install_from_file(source, distdir)
         elif source and urlparse.urlparse(source).scheme:
             return self.install_from_url(source, distdir)
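For reference, the hg-side pushhead search that the first hunk simplifies
reads end to end as a small standalone generator. This is a sketch
assembled from the post-patch lines above; the value given to
NUM_PUSHHEADS_TO_QUERY_PER_PARENT is an assumption here, since the real
constant is defined elsewhere in artifacts.py:

    import subprocess

    # Assumed placeholder value; the real constant lives elsewhere in
    # artifacts.py.
    NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50

    def find_hg_pushheads(hg='hg'):
        # Candidate pushheads are the last N public ancestors of the
        # working directory parent; more recent hashes come first.
        hg_hash_list = subprocess.check_output([
            hg, 'log',
            '--template', '{node}\n',
            '-r', 'last(public() and ::., {num})'.format(
                num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
        ])
        count = 0
        for hg_hash in hg_hash_list.splitlines():
            hg_hash = hg_hash.strip()
            if not hg_hash:
                continue
            count += 1
            yield hg_hash
        if not count:
            raise Exception('Could not find any candidate pushheads in '
                            'the last {num} revisions.'.format(
                                num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))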