bug 1423081 - desktop release support in transforms/task.py r=callek draft
authorAki Sasaki <asasaki@mozilla.com>
Tue, 05 Dec 2017 19:35:45 -0800
changeset 711437 1959452ccacec9c96265fb991c909d977e3277ee
parent 711436 fbc98ebda304701de7d3094e95f15c2eaaab03b4
child 711438 f3a6f83a207bf487f499881f1706749abacf7324
push id 93069
push user asasaki@mozilla.com
push date Wed, 13 Dec 2017 22:57:07 +0000
reviewers callek
bugs 1423081
milestone 59.0a1
bug 1423081 - desktop release support in transforms/task.py r=callek

- update notification scheme
- update shipping phases to build, promote, push, ship
- add binary transparency support

MozReview-Commit-ID: DVQLKep77LM
taskcluster/ci/beetmover-checksums/kind.yml
taskcluster/taskgraph/transforms/release_updates.py
taskcluster/taskgraph/transforms/task.py
--- a/taskcluster/ci/beetmover-checksums/kind.yml
+++ b/taskcluster/ci/beetmover-checksums/kind.yml
@@ -12,17 +12,16 @@ transforms:
 kind-dependencies:
    - checksums-signing
 
 only-for-attributes:
    - nightly
 
 job-template:
    shipping-phase: promote
-   shipping-product: firefox
    notifications:
       completed:
          subject: "COMPLETED: [{task[shipping-product]} {release_config[version]} build{release_config[build_number]}/{config[params][project]}] {task_def[metadata][name]} task"
          message: "COMPLETED: [{task[shipping-product]} {release_config[version]} build{release_config[build_number]}/{config[params][project]}] {task_def[metadata][name]} task"
          plugins:
             by-project:
                mozilla-beta: ["log_collect"]
                mozilla-release: ["log_collect"]
--- a/taskcluster/taskgraph/transforms/release_updates.py
+++ b/taskcluster/taskgraph/transforms/release_updates.py
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
-Transform the beetmover task into an actual task description.
+Transform the update generation task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from copy import deepcopy
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -42,31 +42,45 @@ def _run_task_suffix():
 
 
 # shortcut for a string where task references are allowed
 taskref_or_string = Any(
     basestring,
     {Required('task-reference'): basestring},
 )
 
-notification_ids = optionally_keyed_by('project', Any(None, [basestring]))
+# For more details, see https://github.com/mozilla-releng/pulse-notify#task-definition
+#
+# Notification fields can be keyed by project, which lets you use
+# `by-project` to define different messages or recipients for each
+# project. An illustrative example follows the schema below.
 notification_schema = Schema({
-    Required("subject"): basestring,
-    Required("message"): basestring,
-    Required("ids"): notification_ids,
+    # notification plugins to use for this task status
+    # https://github.com/mozilla-releng/pulse-notify/tree/master/pulsenotify/plugins
+    Optional('plugins'): optionally_keyed_by('project', [basestring]),
 
-})
+    # notification subject
+    Optional('subject'): optionally_keyed_by('project', basestring),
 
-FULL_TASK_NAME = (
-    "[{task[payload][properties][product]} "
-    "{task[payload][properties][version]} "
-    "build{task[payload][properties][build_number]}/"
-    "{task[payload][sourcestamp][branch]}] "
-    "{task[metadata][name]} task"
-)
+    # notification message
+    Optional('message'): optionally_keyed_by('project', basestring),
+
+    # emails to be notified (for ses and smtp plugins only)
+    Optional('emails'): optionally_keyed_by('project', [basestring]),
+
+    # IRC nicknames to notify (for irc plugin only)
+    Optional('nicks'): optionally_keyed_by('project', [basestring]),
+
+    # IRC channels to send a notification to (for irc plugin only)
+    Optional('channels'): optionally_keyed_by('project', [basestring]),
+
+    # notify recipients defined by a named configuration ('id') in the service
+    # https://github.com/mozilla-releng/pulse-notify/blob/production/pulsenotify/id_configs/prod.yml
+    Optional('ids'): optionally_keyed_by('project', [basestring]),
+})
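+
+# A purely illustrative notification entry (hypothetical values) might look
+# like this in a kind.yml, using `by-project` to vary one field per project:
+#
+#    completed:
+#       subject: "COMPLETED: {task[shipping-product]} {release_config[version]} task"
+#       message:
+#          by-project:
+#             mozilla-beta: "Beta task completed"
+#             default: "Task completed"
+#       ids: ["example-id"]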
 
 # A task description is a general description of a TaskCluster task
 task_description_schema = Schema({
     # the label for this task
     Required('label'): basestring,
 
     # description of the task (for metadata)
     Required('description'): basestring,
@@ -90,16 +104,17 @@ task_description_schema = Schema({
     # custom routes for this task; the default treeherder routes will be added
     # automatically
     Optional('routes'): [basestring],
 
     # custom scopes for this task; any scopes required for the worker will be
     # added automatically. The following parameters will be substituted in each
     # scope:
     #  {level} -- the scm level of this push
+    #  {project} -- the project of this push
     Optional('scopes'): [basestring],
 
     # Tags
     Optional('tags'): {basestring: basestring},
 
     # custom "task.extra" content
     Optional('extra'): {basestring: object},
 
@@ -173,18 +188,19 @@ task_description_schema = Schema({
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
     Optional('run-on-projects'): [basestring],
 
     # The `shipping_phase` attribute, defaulting to None. This specifies the
     # release promotion phase that this task belongs to.
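+    # (e.g. beetmover-checksums tasks set `shipping-phase: promote`)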
     Required('shipping-phase', default=None): Any(
         None,
+        'build',
         'promote',
-        'publish',
+        'push',
         'ship',
     ),
 
     # The `shipping_product` attribute, defaulting to None. This specifies the
     # release promotion product that this task belongs to.
     Required('shipping-product', default=None): Any(
         None,
         'devedition',
@@ -244,21 +260,30 @@ task_description_schema = Schema({
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
     'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
     Required('needs-sccache', default=False): bool,
 
-    # notifications
+    # Send notifications via the pulse-notify service:
+    #
+    #     https://github.com/mozilla-releng/pulse-notify
+    #
+    # Notifications can be sent for any of the task status events listed
+    # below (e.g. on completion, failure, or exception).
     Optional('notifications'): {
-        Optional('completed'): Any(notification_schema, notification_ids),
-        Optional('failed'): Any(notification_schema, notification_ids),
-        Optional('exception'): Any(notification_schema, notification_ids),
+        Optional('defined'): notification_schema,
+        Optional('pending'): notification_schema,
+        Optional('running'): notification_schema,
+        Optional('artifact-created'): notification_schema,
+        Optional('completed'): notification_schema,
+        Optional('failed'): notification_schema,
+        Optional('exception'): notification_schema,
     },
 
     # information specific to the worker implementation that will run this task
     'worker': Any({
         Required('implementation'): Any('docker-worker', 'docker-engine'),
         Required('os'): 'linux',
 
         # For tasks that will run in docker-worker or docker-engine, this is the
@@ -330,18 +355,21 @@ task_description_schema = Schema({
 
         # the command to run; if not given, docker-worker will default to the
         # command in the docker image
         Optional('command'): [taskref_or_string],
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
-        # the exit status code that indicates the task should be retried
-        Optional('retry-exit-status'): int,
+        # the exit status code(s) that indicate the task should be retried
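+        # e.g. `retry-exit-status: 4` or `retry-exit-status: [4, 72]`
+        # (illustrative values only)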
+        Optional('retry-exit-status'): Any(
+            int,
+            [int],
+        ),
     }, {
         Required('implementation'): 'generic-worker',
         Required('os'): Any('windows', 'macosx'),
         # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
         # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
 
         # command is a list of commands to run, sequentially
         # on Windows, each command is a string, on OS X and Linux, each command is
@@ -425,16 +453,17 @@ task_description_schema = Schema({
             Optional('revision'): basestring,
             Optional('repository'): basestring,
             Optional('project'): basestring,
         },
         Required('properties'): {
             'product': basestring,
             Optional('build_number'): int,
             Optional('release_promotion'): bool,
+            Optional('generate_bz2_blob'): bool,
             Optional('tuxedo_server_url'): optionally_keyed_by('project', basestring),
             Extra: taskref_or_string,  # additional properties are allowed
         },
     }, {
         Required('implementation'): 'native-engine',
         Required('os'): Any('macosx', 'linux'),
 
         # A link for an executable to download
@@ -479,16 +508,18 @@ task_description_schema = Schema({
 
             # Paths to the artifacts to sign
             Required('paths'): [basestring],
 
             # Signing formats to use on each of the paths
             Required('formats'): [basestring],
         }],
     }, {
+        Required('implementation'): 'binary-transparency',
+    }, {
         Required('implementation'): 'beetmover',
 
         # the maximum time to run, in seconds
         Required('max-run-time', default=600): int,
 
         # locale key, if this is a locale beetmover job
         Optional('locale'): basestring,
 
@@ -779,17 +810,20 @@ def build_docker_worker_payload(config, 
     }
     if 'command' in worker:
         payload['command'] = worker['command']
 
     if 'max-run-time' in worker:
         payload['maxRunTime'] = worker['max-run-time']
 
     if 'retry-exit-status' in worker:
-        payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
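+        # accept either a single exit status or a list of them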
+        if isinstance(worker['retry-exit-status'], int):
+            payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
+        elif isinstance(worker['retry-exit-status'], list):
+            payload['onExitStatus'] = {'retry': worker['retry-exit-status']}
 
     if 'artifacts' in worker:
         artifacts = {}
         for artifact in worker['artifacts']:
             artifacts[artifact['name']] = {
                 'path': artifact['path'],
                 'type': artifact['type'],
                 'expires': task_def['expires'],  # always expire with the task
@@ -941,16 +975,37 @@ def build_scriptworker_signing_payload(c
     worker = task['worker']
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'upstreamArtifacts':  worker['upstream-artifacts']
     }
 
 
+@payload_builder('binary-transparency')
+def build_binary_transparency_payload(config, task, task_def):
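+    # Build the payload for the 'binary-transparency' worker implementation.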
+    release_config = get_release_config(config)
+
+    task_def['payload'] = {
+        'version': release_config['version'],
+        'chain': 'TRANSPARENCY.pem',
+        'contact': task_def['metadata']['owner'],
+        'maxRunTime': 600,
+        'stage-product': task['shipping-product'],
+        'summary': (
+            'https://archive.mozilla.org/pub/{}/candidates/'
+            '{}-candidates/build{}/SHA256SUMMARY'
+        ).format(
+            task['shipping-product'],
+            release_config['version'],
+            release_config['build_number'],
+        ),
+    }
+
+
 @payload_builder('beetmover')
 def build_beetmover_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'upload_date': config.params['build_date'],
@@ -960,17 +1015,17 @@ def build_beetmover_payload(config, task
         task_def['payload']['locale'] = worker['locale']
     if release_config:
         task_def['payload'].update(release_config)
 
 
 @payload_builder('beetmover-cdns')
 def build_beetmover_cdns_payload(config, task, task_def):
     worker = task['worker']
-    release_config = get_release_config(config, force=True)
+    release_config = get_release_config(config)
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'product': worker['product'],
         'version': release_config['version'],
         'build_number': release_config['build_number'],
     }
 
@@ -1130,19 +1185,17 @@ def add_nightly_index_routes(config, tas
 
     return task
 
 
 @index_builder('release')
 def add_release_index_routes(config, task):
     index = task.get('index')
     routes = []
-    release_config = get_release_config(config, force=True)
-
-    verify_index_job_name(index)
+    release_config = get_release_config(config)
 
     subs = config.params.copy()
     subs['build_number'] = str(release_config['build_number'])
     subs['revision'] = subs['head_rev']
     subs['underscore_version'] = release_config['version'].replace('.', '_')
     subs['product'] = index['product']
     subs['trust-domain'] = config.graph_config['trust-domain']
     subs['branch'] = subs['project']
@@ -1273,19 +1326,20 @@ def add_index_routes(config, tasks):
 
 
 @transforms.add
 def build_task(config, tasks):
     for task in tasks:
         level = str(config.params['level'])
         worker_type = task['worker-type'].format(level=level)
         provisioner_id, worker_type = worker_type.split('/', 1)
+        project = config.params['project']
 
         routes = task.get('routes', [])
-        scopes = [s.format(level=level) for s in task.get('scopes', [])]
+        scopes = [s.format(level=level, project=project) for s in task.get('scopes', [])]
 
         # set up extra
         extra = task.get('extra', {})
         extra['parent'] = os.environ.get('TASK_ID', '')
         task_th = task.get('treeherder')
         if task_th:
             extra['treeherderEnv'] = task_th['environments']
 
@@ -1377,74 +1431,87 @@ def build_task(config, tasks):
                 th_push_link)
 
         # add the payload and adjust anything else as required (e.g., scopes)
         payload_builders[task['worker']['implementation']](config, task, task_def)
 
         attributes = task.get('attributes', {})
         attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
         attributes['always_target'] = task['always-target']
-        attributes['shipping_phase'] = task['shipping-phase']
-        attributes['shipping_product'] = task['shipping-product']
+        # This logic is here since downstream tasks don't always match their
+        # upstream dependency's shipping_phase.
+        # An explicitly set task['shipping-phase'] takes precedence, then
+        # an existing attributes['shipping_phase'], then we fall back to None.
+        if task.get('shipping-phase') is not None:
+            attributes['shipping_phase'] = task['shipping-phase']
+        else:
+            attributes.setdefault('shipping_phase', None)
+        # shipping_product will always match the upstream task's
+        # shipping_product, so a pre-set existing attributes['shipping_product']
+        # takes precedence over task['shipping-product']. However, make sure
+        # we don't have conflicting values.
+        if task.get('shipping-product') and \
+                attributes.get('shipping_product') not in (None, task['shipping-product']):
+            raise Exception(
+                "{} shipping_product {} doesn't match task shipping-product {}!".format(
+                    task['label'], attributes['shipping_product'], task['shipping-product']
+                )
+            )
+        attributes.setdefault('shipping_product', task['shipping-product'])
 
         # Set MOZ_AUTOMATION on all jobs.
         if task['worker']['implementation'] in (
             'generic-worker',
             'docker-engine',
             'native-engine',
             'docker-worker',
         ):
             payload = task_def.get('payload')
             if payload:
                 env = payload.setdefault('env', {})
                 env['MOZ_AUTOMATION'] = '1'
 
         notifications = task.get('notifications')
         if notifications:
+            release_config = get_release_config(config)
             task_def['extra'].setdefault('notifications', {})
-            for k, v in notifications.items():
-                if isinstance(v, dict) and len(v) == 1 and v.keys()[0].startswith('by-'):
-                    v = {'tmp': v}
-                    resolve_keyed_by(v, 'tmp', 'notifications', **config.params)
-                    v = v['tmp']
-                if v is None:
-                    continue
-                elif isinstance(v, list):
-                    v = {'ids': v}
-                    if 'completed' == k:
-                        v.update({
-                            "subject": "Completed: {}".format(FULL_TASK_NAME),
-                            "message": "{} has completed successfully! Yay!".format(
-                                FULL_TASK_NAME),
-                        })
-                    elif k == 'failed':
-                        v.update({
-                            "subject": "Failed: {}".format(FULL_TASK_NAME),
-                            "message": "Uh-oh! {} failed.".format(FULL_TASK_NAME),
-                        })
-                    elif k == 'exception':
-                        v.update({
-                            "subject": "Exception: {}".format(FULL_TASK_NAME),
-                            "message": "Uh-oh! {} resulted in an exception.".format(
-                                FULL_TASK_NAME),
-                        })
-                else:
-                    resolve_keyed_by(v, 'ids', 'notifications', **config.params)
-                if v['ids'] is None:
-                    continue
-                notifications_kwargs = dict(
-                    task=task_def,
-                    config=config.__dict__,
-                    release_config=get_release_config(config, force=True),
-                )
-                task_def['extra']['notifications']['task-' + k] = {
-                    'subject': v['subject'].format(**notifications_kwargs),
-                    'message': v['message'].format(**notifications_kwargs),
-                    'ids': v['ids'],
-                }
+            for notification_event, notification in notifications.items():
+
+                for notification_option, notification_option_value in notification.items():
+
+                    # resolve by-project
+                    resolve_keyed_by(
+                        notification,
+                        notification_option,
+                        'notifications',
+                        project=config.params['project'],
+                    )
+                    # re-read the value; resolve_keyed_by may have replaced a
+                    # by-project dict with the resolved string or list
+                    notification_option_value = notification[notification_option]
+
+                    # resolve formatting for each of the fields
+                    format_kwargs = dict(
+                        task=task,
+                        task_def=task_def,
+                        config=config.__dict__,
+                        release_config=release_config,
+                    )
+                    if isinstance(notification_option_value, basestring):
+                        notification[notification_option] = notification_option_value.format(
+                            **format_kwargs
+                        )
+                    elif isinstance(notification_option_value, list):
+                        notification[notification_option] = [
+                            i.format(**format_kwargs) for i in notification_option_value
+                        ]
+
+                # pulse-notify event names are prefixed with 'task-',
+                # except for artifact-created
+                if notification_event != 'artifact-created':
+                    notification_event = 'task-' + notification_event
+
+                # store the resolved notification in task.extra
+                task_def['extra']['notifications'][notification_event] = notification
 
         yield {
             'label': task['label'],
             'task': task_def,
             'dependencies': task.get('dependencies', {}),
             'attributes': attributes,
             'optimization': task.get('optimization', None),
         }