Bug 1314795 - introduce build_date to params that defaults to pushdate, r?dustin draft
author Jordan Lund <jlund@mozilla.com>
Wed, 02 Nov 2016 19:23:27 -0700
changeset 433469 668b61866380f8ce2424f231c54621b3187daf18
parent 433136 50d2cc0d4cbd5dcbf84bde8b8622a1e99d125f59
child 535901 cf4ec4d9faead2bcf19489969277941b5eac1b2c
push id 34591
push user jlund@mozilla.com
push date Thu, 03 Nov 2016 19:26:01 +0000
reviewers dustin
bugs 1314795
milestone 52.0a1
Bug 1314795 - introduce build_date to params that defaults to pushdate, r?dustin MozReview-Commit-ID: AviPKtw3BiY
taskcluster/docs/parameters.rst
taskcluster/taskgraph/decision.py
taskcluster/taskgraph/task/docker_image.py
taskcluster/taskgraph/transforms/android_stuff.py
taskcluster/taskgraph/transforms/job/hazard.py
taskcluster/taskgraph/transforms/job/mozharness.py
taskcluster/taskgraph/transforms/job/spidermonkey.py
taskcluster/taskgraph/transforms/job/toolchain.py
taskcluster/taskgraph/transforms/marionette_harness.py
taskcluster/taskgraph/transforms/task.py
--- a/taskcluster/docs/parameters.rst
+++ b/taskcluster/docs/parameters.rst
@@ -44,16 +44,24 @@ Push Information
 
 ``pushlog_id``
    The ID from the ``hg.mozilla.org`` pushlog
 
 ``pushdate``
    The timestamp of the push to the repository that triggered this decision
    task.  Expressed as an integer seconds since the UNIX epoch.
 
+``build_date``
+   The timestamp to use as the build date.  Defaults to ``pushdate``, falling back to the
+   current time at taskgraph invocation.  Expressed as an integer seconds since the UNIX epoch.
+
+``moz_build_date``
+   The ``build_date`` timestamp rendered as a string, using the UTC time format
+   ``%Y%m%d%H%M%S``.
+
 Tree Information
 ----------------
 
 ``project``
    Another name for what may otherwise be called tree or branch or
    repository.  This is the unqualified name, such as ``mozilla-central`` or
    ``cedar``.
 
--- a/taskcluster/taskgraph/decision.py
+++ b/taskcluster/taskgraph/decision.py
@@ -4,16 +4,18 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import json
 import logging
+
+import time
 import yaml
 
 from .generator import TaskGraphGenerator
 from .create import create_tasks
 from .parameters import Parameters
 from .target_tasks import get_method
 from .taskgraph import TaskGraph
 
@@ -120,16 +122,22 @@ def get_decision_parameters(options):
         'target_tasks_method',
     ] if n in options}
 
     # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
     # case, fake it
     if '@' not in parameters['owner']:
         parameters['owner'] += '@noreply.mozilla.org'
 
+    # use the pushdate as build_date if given, else use current time
+    parameters['build_date'] = parameters['pushdate'] or int(time.time())
+    # moz_build_date is the build identifier based on build_date
+    parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
+                                                 time.gmtime(parameters['build_date']))
+
     project = parameters['project']
     try:
         parameters.update(PER_PROJECT_PARAMETERS[project])
     except KeyError:
         logger.warning("using default project parameters; add {} to "
                        "PER_PROJECT_PARAMETERS in {} to customize behavior "
                        "for this project".format(project, __file__))
         parameters.update(PER_PROJECT_PARAMETERS['default'])
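
A minimal sketch of the fallback introduced above, assuming a hypothetical parameters dict; a falsy pushdate (e.g. 0) triggers the current-time fallback:

    import time

    # hypothetical inputs, mirroring get_decision_parameters()
    parameters = {'pushdate': 1478201161}        # seconds since the UNIX epoch
    # parameters = {'pushdate': 0}               # falsy: falls back to int(time.time())

    parameters['build_date'] = parameters['pushdate'] or int(time.time())
    parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
                                                 time.gmtime(parameters['build_date']))
    # build_date     -> 1478201161
    # moz_build_date -> '20161103192601'
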
--- a/taskcluster/taskgraph/task/docker_image.py
+++ b/taskcluster/taskgraph/task/docker_image.py
@@ -33,25 +33,23 @@ class DockerImageTask(base.Task):
         super(DockerImageTask, self).__init__(*args, **kwargs)
 
     def __eq__(self, other):
         return super(DockerImageTask, self).__eq__(other) and \
                self.index_paths == other.index_paths
 
     @classmethod
     def load_tasks(cls, kind, path, config, params, loaded_tasks):
-        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(params['pushdate']))
-
         parameters = {
             'pushlog_id': params.get('pushlog_id', 0),
-            'pushdate': pushdate,
-            'pushtime': pushdate[8:],
-            'year': pushdate[0:4],
-            'month': pushdate[4:6],
-            'day': pushdate[6:8],
+            'pushdate': params['moz_build_date'],
+            'pushtime': params['moz_build_date'][8:],
+            'year': params['moz_build_date'][0:4],
+            'month': params['moz_build_date'][4:6],
+            'day': params['moz_build_date'][6:8],
             'project': params['project'],
             'docker_image': docker_image,
             'base_repository': params['base_repository'] or params['head_repository'],
             'head_repository': params['head_repository'],
             'head_ref': params['head_ref'] or params['head_rev'],
             'head_rev': params['head_rev'],
             'owner': params['owner'],
             'level': params['level'],
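
The substitutions above rely on the fixed %Y%m%d%H%M%S layout of moz_build_date; a minimal sketch with a hypothetical value:

    # hypothetical moz_build_date value, formatted as %Y%m%d%H%M%S
    moz_build_date = '20161103192601'

    moz_build_date[0:4]   # '2016'    ('year')
    moz_build_date[4:6]   # '11'      ('month')
    moz_build_date[6:8]   # '03'      ('day')
    moz_build_date[8:]    # '192601'  ('pushtime', HHMMSS)
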
--- a/taskcluster/taskgraph/transforms/android_stuff.py
+++ b/taskcluster/taskgraph/transforms/android_stuff.py
@@ -18,18 +18,17 @@ def setup_task(config, tasks):
     for task in tasks:
         task['label'] = task['name']
         env = task['worker'].setdefault('env', {})
         env.update({
             'GECKO_BASE_REPOSITORY': config.params['base_repository'],
             'GECKO_HEAD_REF': config.params['head_rev'],
             'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
             'GECKO_HEAD_REV': config.params['head_rev'],
-            'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
-                                            time.gmtime(config.params['pushdate'])),
+            'MOZ_BUILD_DATE': config.params['moz_build_date'],
             'MOZ_SCM_LEVEL': config.params['level'],
             'MH_BRANCH': config.params['project'],
         })
 
         task['worker'].setdefault('caches', []).append({
             'type': 'persistent',
             'name': 'level-{}-{}-tc-vcs'.format(
                 config.params['level'], config.params['project']),
--- a/taskcluster/taskgraph/transforms/job/hazard.py
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -51,17 +51,17 @@ def docker_worker_hazard(config, job, ta
     docker_worker_add_tc_vcs_cache(config, job, taskdesc)
     docker_worker_add_public_artifacts(config, job, taskdesc)
     docker_worker_add_workspace_cache(config, job, taskdesc)
     docker_worker_setup_secrets(config, job, taskdesc)
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
 
     env = worker['env']
     env.update({
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
     })
 
     # script parameters
     if run.get('tooltool-manifest'):
         env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
     if run.get('mozconfig'):
         env['MOZCONFIG'] = run['mozconfig']
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -95,17 +95,17 @@ def mozharness_on_docker_worker_setup(co
     docker_worker_support_vcs_checkout(config, job, taskdesc)
 
     env = worker.setdefault('env', {})
     env.update({
         'MOZHARNESS_CONFIG': ' '.join(run['config']),
         'MOZHARNESS_SCRIPT': run['script'],
         'MH_BRANCH': config.params['project'],
         'MH_BUILD_POOL': 'taskcluster',
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
     })
 
     if 'actions' in run:
         env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
 
     if 'options' in run:
         env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
@@ -192,17 +192,17 @@ def mozharness_on_windows(config, job, t
         'path': r'public\build',
         'type': 'directory',
     }]
 
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
 
     env = worker['env']
     env.update({
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
         'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
         'TOOLTOOL_REV': 'master',
     })
 
     mh_command = [r'c:\mozilla-build\python\python.exe']
     mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
     for cfg in run['config']:
--- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -47,17 +47,17 @@ def docker_worker_spidermonkey(config, j
         })
 
     docker_worker_add_public_artifacts(config, job, taskdesc)
 
     env = worker['env']
     env.update({
         'MOZHARNESS_DISABLE': 'true',
         'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
     })
 
     # tooltool downloads; note that this script downloads using the API
     # endpoiint directly, rather than via relengapi-proxy
     worker['caches'].append({
         'type': 'persistent',
         'name': 'tooltool-cache',
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -38,17 +38,17 @@ def docker_worker_toolchain(config, job,
         'type': 'directory',
     })
 
     docker_worker_add_tc_vcs_cache(config, job, taskdesc)
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
 
     env = worker['env']
     env.update({
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
         'TOOLS_DISABLE': 'true',
     })
 
     # tooltool downloads; note that this downloads using the API endpoint directly,
     # rather than via relengapi-proxy
     worker['caches'].append({
         'type': 'persistent',
@@ -87,17 +87,17 @@ def windows_toolchain(config, job, taskd
         'path': r'llvm-sources',
     }]
     taskdesc['scopes'].extend([
         'generic-worker:cache:' + svn_cache,
     ])
 
     env = worker['env']
     env.update({
-        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
         'MOZ_SCM_LEVEL': config.params['level'],
         'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
         'TOOLTOOL_REV': 'master',
     })
 
     hg = r'c:\Program Files\Mercurial\hg.exe'
     bash = r'c:\mozilla-build\msys\bin\bash'
     worker['command'] = [
--- a/taskcluster/taskgraph/transforms/marionette_harness.py
+++ b/taskcluster/taskgraph/transforms/marionette_harness.py
@@ -19,18 +19,17 @@ def setup_task(config, tasks):
         del task['name']
         task['label'] = 'marionette-harness'
         env = task['worker'].setdefault('env', {})
         env.update({
             'GECKO_BASE_REPOSITORY': config.params['base_repository'],
             'GECKO_HEAD_REF': config.params['head_rev'],
             'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
             'GECKO_HEAD_REV': config.params['head_rev'],
-            'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
-                                            time.gmtime(config.params['pushdate'])),
+            'MOZ_BUILD_DATE': config.params['moz_build_date'],
             'MOZ_SCM_LEVEL': config.params['level'],
         })
 
         task['worker']['caches'] = [{
             'type': 'persistent',
             'name': 'level-{}-{}-tc-vcs'.format(
                 config.params['level'], config.params['project']),
             'mount-point': "/home/worker/.tc-vcs",
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -104,29 +104,29 @@ task_description_schema = Schema({
             }
         ),
 
         # The rank that the task will receive in the TaskCluster
         # index.  A newly completed task supercedes the currently
         # indexed task iff it has a higher rank.  If unspecified,
         # 'by-tier' behavior will be used.
         'rank': Any(
-            # Rank is equal the timestamp of the pushdate for tier-1
+            # Rank is equal to the timestamp of the build_date for tier-1
             # tasks, and zero for non-tier-1.  This sorts tier-{2,3}
             # builds below tier-1 in the index.
             'by-tier',
 
             # Rank is given as an integer constant (e.g. zero to make
             # sure a task is last in the index).
             int,
 
-            # Rank is equal to the timestamp of the pushdate.  This
+            # Rank is equal to the timestamp of the build_date.  This
             # option can be used to override the 'by-tier' behavior
             # for non-tier-1 tasks.
-            'pushdate',
+            'build_date',
         ),
     },
 
     # The `run_on_projects` attribute, defaulting to "all".  This dictates the
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
     Optional('run-on-projects'): [basestring],
 
@@ -288,17 +288,17 @@ UNKNOWN_GROUP_NAME = "Treeherder group {
 
 BUILDBOT_ROUTE_TEMPLATES = [
     "index.buildbot.branches.{project}.{job-name-buildbot}",
     "index.buildbot.revisions.{head_rev}.{project}.{job-name-buildbot}",
 ]
 
 V2_ROUTE_TEMPLATES = [
     "index.gecko.v2.{project}.latest.{product}.{job-name-gecko-v2}",
-    "index.gecko.v2.{project}.pushdate.{pushdate_long}.{product}.{job-name-gecko-v2}",
+    "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name-gecko-v2}",
     "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name-gecko-v2}",
 ]
 
 # the roots of the treeherder routes, keyed by treeherder environment
 TREEHERDER_ROUTE_ROOTS = {
     'production': 'tc-treeherder',
     'staging': 'tc-treeherder-stage',
 }
@@ -461,19 +461,18 @@ def add_index_routes(config, tasks):
             }
 
         if job_name['gecko-v2'] not in JOB_NAME_WHITELIST:
             raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name['gecko-v2']))
 
         subs = config.params.copy()
         for n in job_name:
             subs['job-name-' + n] = job_name[n]
-        subs['pushdate_long'] = time.strftime(
-            "%Y.%m.%d.%Y%m%d%H%M%S",
-            time.gmtime(config.params['pushdate']))
+        subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
+                                                time.gmtime(config.params['build_date']))
         subs['product'] = index['product']
 
         if 'buildbot' in job_name:
             for tpl in BUILDBOT_ROUTE_TEMPLATES:
                 routes.append(tpl.format(**subs))
         if 'gecko-v2' in job_name:
             for tpl in V2_ROUTE_TEMPLATES:
                 routes.append(tpl.format(**subs))
@@ -481,19 +480,19 @@ def add_index_routes(config, tasks):
         # The default behavior is to rank tasks according to their tier
         extra_index = task.setdefault('extra', {}).setdefault('index', {})
         rank = index.get('rank', 'by-tier')
 
         if rank == 'by-tier':
             # rank is zero for non-tier-1 tasks and based on pushid for others;
             # this sorts tier-{2,3} builds below tier-1 in the index
             tier = task.get('treeherder', {}).get('tier', 3)
-            extra_index['rank'] = 0 if tier > 1 else int(config.params['pushdate'])
-        elif rank == 'pushdate':
-            extra_index['rank'] = int(config.params['pushdate'])
+            extra_index['rank'] = 0 if tier > 1 else int(config.params['build_date'])
+        elif rank == 'build_date':
+            extra_index['rank'] = int(config.params['build_date'])
         else:
             extra_index['rank'] = rank
 
         del task['index']
         yield task
 
 
 @transforms.add
@@ -593,16 +592,16 @@ def check_v2_routes():
     # we only deal with the 'routes' key here
     routes = routes_json['routes']
 
     # we use different variables than mozharness
     for mh, tg in [
             ('{index}', 'index'),
             ('{build_product}', '{product}'),
             ('{build_name}-{build_type}', '{job-name-gecko-v2}'),
-            ('{year}.{month}.{day}.{pushdate}', '{pushdate_long}')]:
+            ('{year}.{month}.{day}.{pushdate}', '{build_date_long}')]:
         routes = [r.replace(mh, tg) for r in routes]
 
     if sorted(routes) != sorted(V2_ROUTE_TEMPLATES):
         raise Exception("V2_ROUTE_TEMPLATES does not match Mozharness's routes.json: "
                         "%s vs %s" % (V2_ROUTE_TEMPLATES, routes))
 
 check_v2_routes()
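
For reference, a minimal sketch of how the renamed substitutions feed the v2 index route and the index rank; the parameter and job-name values below are hypothetical:

    import time

    # hypothetical substitutions, mirroring add_index_routes()
    subs = {
        'project': 'mozilla-central',
        'product': 'firefox',
        'job-name-gecko-v2': 'linux64-opt',
    }
    build_date = 1478201161                      # hypothetical params['build_date']
    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
                                            time.gmtime(build_date))

    tpl = "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name-gecko-v2}"
    print(tpl.format(**subs))
    # index.gecko.v2.mozilla-central.pushdate.2016.11.03.20161103192601.firefox.linux64-opt

    # 'by-tier' rank: zero for tier-{2,3} tasks, the raw build_date for tier-1
    tier = 1
    rank = 0 if tier > 1 else int(build_date)    # 1478201161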