Bug 1384775 - Add some taskgraph hacks for comm-central. draft
author Tom Prince <mozilla@hocat.ca>
Wed, 26 Jul 2017 15:31:45 -0600
changeset 616418 e1962248fc77ff916802455836fae97f9e438d47
parent 615176 32d9d1e81cc607320a36391845917f645f7a7f72
child 639463 d1c95f5cae2b4e35575e8812eb6d5f1925159a3d
push id 70675
push user bmo:mozilla@hocat.ca
push date Thu, 27 Jul 2017 01:09:59 +0000
bugs 1384775
milestone 56.0a1
Bug 1384775 - Add some taskgraph hacks for comm-central. MozReview-Commit-ID: 2BmOo9ZjpFf
taskcluster/docs/parameters.rst
taskcluster/mach_commands.py
taskcluster/taskgraph/decision.py
taskcluster/taskgraph/parameters.py
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/task.py
--- a/taskcluster/docs/parameters.rst
+++ b/taskcluster/docs/parameters.rst
@@ -97,8 +97,15 @@ syntax or reading a project-specific con
     The method to use to determine the target task set.  This is the suffix of
     one of the functions in ``taskcluster/taskgraph/target_tasks.py``.
 
 ``optimize_target_tasks``
    If true, then target tasks are eligible for optimization.
 
 ``include_nightly``
    If true, then nightly tasks are eligible for optimization.
+
+Comm Push Information
+---------------------
+
+These parameters mirror the Gecko push information above, but describe the
+comm-* repository (for example, comm-central) that is checked out alongside
+Gecko.  They are optional; when any of them is supplied, all four should be.
+
+``comm_base_repository``
+   The URL of the "base" comm-* repository to clone.
+
+``comm_head_repository``
+   The URL of the "head" comm-* repository to fetch the revision from.
+
+``comm_head_rev``
+   The commit revision to use from the head comm-* repository.
+
+``comm_head_ref``
+   The reference to fetch; for Mercurial this is usually the same as
+   ``comm_head_rev``.
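
For illustration, the values below are placeholders showing the shape these
parameters take for a comm push; they are not taken from any real push:

    # Hypothetical example of the comm push parameters; all values are
    # placeholders.
    comm_push_parameters = {
        'comm_base_repository': 'https://hg.mozilla.org/comm-central',
        'comm_head_repository': 'https://hg.mozilla.org/comm-central',
        'comm_head_rev': '0123456789ab',
        'comm_head_ref': '0123456789ab',
    }
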
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -113,16 +113,28 @@ class MachCommands(MachCommandBase):
                      required=True,
                      help='URL for "head" repository to fetch revision from')
     @CommandArgument('--head-ref',
                      required=True,
                      help='Reference (this is same as rev usually for hg)')
     @CommandArgument('--head-rev',
                      required=True,
                      help='Commit revision to use from head repository')
+    @CommandArgument('--comm-base-repository',
+                     required=False,
+                     help='URL for "base" comm-* repository to clone')
+    @CommandArgument('--comm-head-repository',
+                     required=False,
+                     help='URL for "head" comm-* repository to fetch revision from')
+    @CommandArgument('--comm-head-ref',
+                     required=False,
+                     help='comm-* Reference (this is same as rev usually for hg)')
+    @CommandArgument('--comm-head-rev',
+                     required=False,
+                     help='Commit revision to use from head comm-* repository')
     @CommandArgument('--message',
                      required=True,
                      help='Commit message to be parsed. Example: "try: -b do -p all -u all"')
     @CommandArgument('--project',
                      required=True,
                      help='Project to use for creating task graph. Example: --project=try')
     @CommandArgument('--pushlog-id',
                      dest='pushlog_id',
--- a/taskcluster/taskgraph/decision.py
+++ b/taskcluster/taskgraph/decision.py
@@ -155,16 +155,25 @@ def get_decision_parameters(options):
         'project',
         'pushlog_id',
         'pushdate',
         'owner',
         'level',
         'target_tasks_method',
     ] if n in options}
 
+    for n in (
+        'comm_base_repository',
+        'comm_head_repository',
+        'comm_head_rev',
+        'comm_head_ref',
+    ):
+        if n in options:
+            parameters[n] = options[n]
+
     # Define default filter list, as most configurations shouldn't need
     # custom filters.
     parameters['filters'] = [
         'check_servo',
         'target_tasks_method',
     ]
 
     # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
--- a/taskcluster/taskgraph/parameters.py
+++ b/taskcluster/taskgraph/parameters.py
@@ -25,36 +25,50 @@ PARAMETER_NAMES = set([
     'optimize_target_tasks',
     'owner',
     'project',
     'pushdate',
     'pushlog_id',
     'target_tasks_method',
 ])
 
+COMM_PARAMETER_NAMES = set([
+    'comm_base_repository',
+    'comm_head_ref',
+    'comm_head_repository',
+    'comm_head_rev',
+])
+
 
 class Parameters(ReadOnlyDict):
     """An immutable dictionary with nicer KeyError messages on failure"""
     def check(self):
         names = set(self)
         msg = []
 
         missing = PARAMETER_NAMES - names
         if missing:
             msg.append("missing parameters: " + ", ".join(missing))
 
         extra = names - PARAMETER_NAMES
+
+        # comm parameters are optional as a group: if any of them are supplied,
+        # all of them must be, and none of them count as "extra".
+        if extra & COMM_PARAMETER_NAMES:
+            missing = COMM_PARAMETER_NAMES - names
+            if missing:
+                msg.append("missing parameters: " + ", ".join(missing))
+            extra = extra - COMM_PARAMETER_NAMES
+
         if extra:
             msg.append("extra parameters: " + ", ".join(extra))
 
         if msg:
             raise Exception("; ".join(msg))
 
     def __getitem__(self, k):
-        if k not in PARAMETER_NAMES:
+        if k not in PARAMETER_NAMES | COMM_PARAMETER_NAMES:
             raise KeyError("no such parameter {!r}".format(k))
         try:
             return super(Parameters, self).__getitem__(k)
         except KeyError:
             raise KeyError("taskgraph parameter {!r} not found".format(k))
 
 
 def load_parameters_file(filename):
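
A minimal sketch of the relaxed check above (assuming this patch is applied):
supplying only some of the comm parameters is reported as missing, while a
complete set passes without being flagged as extra.  Only the set arithmetic
is shown; the surrounding Parameters machinery is omitted:

    # Sketch of the check's set arithmetic for a partial set of comm parameters.
    from taskgraph.parameters import COMM_PARAMETER_NAMES, PARAMETER_NAMES

    names = PARAMETER_NAMES | {'comm_head_repository', 'comm_head_rev'}
    extra = names - PARAMETER_NAMES         # the two comm names supplied
    missing = COMM_PARAMETER_NAMES - names  # comm_base_repository, comm_head_ref
    extra = extra - COMM_PARAMETER_NAMES    # set(): comm names are never "extra"
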
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -101,16 +101,23 @@ def support_vcs_checkout(config, job, ta
 
     taskdesc['worker'].setdefault('env', {}).update({
         'GECKO_BASE_REPOSITORY': config.params['base_repository'],
         'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
         'GECKO_HEAD_REV': config.params['head_rev'],
         'HG_STORE_PATH': '~/checkouts/hg-store',
     })
 
+    # not every run-using schema defines comm-checkout, so treat a missing key
+    # as False rather than raising KeyError
+    if job['run'].get('comm-checkout'):
+        taskdesc['worker']['env'].update({
+            'COMM_BASE_REPOSITORY': config.params['comm_base_repository'],
+            'COMM_HEAD_REPOSITORY': config.params['comm_head_repository'],
+            'COMM_HEAD_REV': config.params['comm_head_rev'],
+        })
+
     # Give task access to hgfingerprint secret so it can pin the certificate
     # for hg.mozilla.org.
     taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
 
     # only some worker platforms have taskcluster-proxy enabled
     if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
         taskdesc['worker']['taskcluster-proxy'] = True
 
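
For a job whose run section sets comm-checkout, the worker environment gains
the COMM_* entries alongside the existing GECKO_* ones.  The sketch below is
illustrative only; the URLs and revisions stand in for the corresponding
config.params values:

    # Hypothetical resulting environment for a comm-checkout job.
    env = {
        'GECKO_BASE_REPOSITORY': 'https://hg.mozilla.org/mozilla-central',
        'GECKO_HEAD_REPOSITORY': 'https://hg.mozilla.org/mozilla-central',
        'GECKO_HEAD_REV': '<head_rev>',
        'HG_STORE_PATH': '~/checkouts/hg-store',
        'COMM_BASE_REPOSITORY': 'https://hg.mozilla.org/comm-central',
        'COMM_HEAD_REPOSITORY': 'https://hg.mozilla.org/comm-central',
        'COMM_HEAD_REV': '<comm_head_rev>',
    }
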
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -17,16 +17,19 @@ run_task_schema = Schema({
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
     Required('cache-dotcache', default=False): bool,
 
     # if true (the default), perform a checkout in /home/worker/checkouts/gecko
     Required('checkout', default=True): bool,
 
+    # if true, also perform a comm-* checkout in /home/worker/checkouts/gecko/comm
+    Required('comm-checkout', default=False): bool,
+
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 })
 
 
 def common_setup(config, job, taskdesc):
@@ -49,16 +52,18 @@ def docker_worker_run_task(config, job, 
         })
 
     run_command = run['command']
     if isinstance(run_command, basestring):
         run_command = ['bash', '-cx', run_command]
     command = ['/home/worker/bin/run-task']
     if run['checkout']:
         command.append('--vcs-checkout=~/checkouts/gecko')
+    if run['comm-checkout']:
+        command.append('--comm-checkout=/home/worker/checkouts/gecko/comm')
     command.append('--fetch-hgfingerprint')
     command.append('--')
     command.extend(run_command)
     worker['command'] = command
 
 
 @run_job_using("native-engine", "run-task", schema=run_task_schema)
 def native_engine_run_task(config, job, taskdesc):
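
Putting the run-task pieces together: a docker-worker job with checkout and
comm-checkout both enabled ends up with a command along the lines of the
sketch below; the arguments after "--" are the job's own command, shown here
with a placeholder:

    # Sketch of the command assembled by docker_worker_run_task for a job with
    # checkout=True and comm-checkout=True; the trailing command is hypothetical.
    command = [
        '/home/worker/bin/run-task',
        '--vcs-checkout=~/checkouts/gecko',
        '--comm-checkout=/home/worker/checkouts/gecko/comm',
        '--fetch-hgfingerprint',
        '--',
        'bash', '-cx', './mach build',  # placeholder job command
    ]
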
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -160,17 +160,18 @@ task_description_schema = Schema({
         # For tasks that will run in docker-worker or docker-engine, this is the
         # name of the docker image or in-tree docker image to run the task in.  If
         # in-tree, then a dependency will be created automatically.  This is
         # generally `desktop-test`, or an image that acts an awful lot like it.
         Required('docker-image'): Any(
             # a raw Docker image path (repo/image:tag)
             basestring,
             # an in-tree generated docker image (from `taskcluster/docker/<name>`)
-            {'in-tree': basestring}
+            # (or, as a comm-central hack, any other dict; non-in-tree dicts
+            # are passed through unchanged by build_docker_worker_payload)
+            dict,
         ),
 
         # worker features that should be enabled
         Required('relengapi-proxy', default=False): bool,
         Required('chain-of-trust', default=False): bool,
         Required('taskcluster-proxy', default=False): bool,
         Required('allow-ptrace', default=False): bool,
         Required('loopback-video', default=False): bool,
@@ -500,17 +501,17 @@ V2_L10N_TEMPLATES = [
 # the roots of the treeherder routes, keyed by treeherder environment
 TREEHERDER_ROUTE_ROOTS = {
     'production': 'tc-treeherder',
     'staging': 'tc-treeherder-stage',
 }
 
 COALESCE_KEY = 'builds.{project}.{name}'
 
-DEFAULT_BRANCH_PRIORITY = 'low'
+DEFAULT_BRANCH_PRIORITY = 'very-low'
 BRANCH_PRIORITIES = {
     'mozilla-release': 'highest',
     'comm-esr45': 'highest',
     'comm-esr52': 'highest',
     'mozilla-esr45': 'very-high',
     'mozilla-esr52': 'very-high',
     'mozilla-beta': 'high',
     'comm-beta': 'high',
@@ -562,17 +563,17 @@ def index_builder(name):
     return wrap
 
 
 @payload_builder('docker-worker')
 def build_docker_worker_payload(config, task, task_def):
     worker = task['worker']
 
     image = worker['docker-image']
-    if isinstance(image, dict):
+    if isinstance(image, dict) and 'in-tree' in image:
         docker_image_task = 'build-docker-image-' + image['in-tree']
         task.setdefault('dependencies', {})['docker-image'] = docker_image_task
         image = {
             "path": "public/image.tar.zst",
             "taskId": {"task-reference": "<docker-image>"},
             "type": "task-image",
         }
 
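
With the schema relaxed as above, docker-image still accepts a raw image path
or an in-tree dict; only the in-tree form is rewritten to a task-image
reference with an added dependency, and any other dict is passed through to
the payload unchanged.  The values below are hypothetical:

    # Hypothetical docker-image values accepted by the relaxed schema.
    raw_image = 'taskcluster/desktop-test:0.1.0'   # used as-is
    in_tree_image = {'in-tree': 'desktop-build'}   # rewritten to a task-image
                                                   # reference plus a dependency
    other_image = {                                # passed through unchanged
        'path': 'public/image.tar.zst',
        'taskId': 'abc123',
        'type': 'task-image',
    }
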
@@ -709,17 +710,17 @@ def build_generic_worker_payload(config,
 
 
 @payload_builder('scriptworker-signing')
 def build_scriptworker_signing_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
-        'upstreamArtifacts':  worker['upstream-artifacts']
+        'upstreamArtifacts': worker['upstream-artifacts']
     }
 
 
 @payload_builder('beetmover')
 def build_beetmover_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
 
@@ -971,23 +972,16 @@ def build_task(config, tasks):
                 treeherder['groupSymbol'] = groupSymbol
                 if groupSymbol not in GROUP_NAMES:
                     raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
                 treeherder['groupName'] = GROUP_NAMES[groupSymbol]
             treeherder['symbol'] = symbol
             treeherder['jobKind'] = task_th['kind']
             treeherder['tier'] = task_th['tier']
 
-            routes.extend([
-                '{}.v2.{}.{}.{}'.format(TREEHERDER_ROUTE_ROOTS[env],
-                                        config.params['project'],
-                                        config.params['head_rev'],
-                                        config.params['pushlog_id'])
-                for env in task_th['environments']
-            ])
 
         if 'expires-after' not in task:
             task['expires-after'] = '28 days' if config.params['project'] == 'try' else '1 year'
 
         if 'deadline-after' not in task:
             task['deadline-after'] = '1 day'
 
         if 'coalesce-name' in task and int(config.params['level']) > 1: