*
Bug 1381669 - Port backfilling to actions.json
MozReview-Commit-ID: 5GgnOU2e75v
new file mode 100644
--- /dev/null
+++ b/taskcluster/actions/backfill.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+import requests
+from slugid import nice as slugid
+
+from .registry import register_callback_action
+from .util import (clone_and_update_task, create_task)
+
+TASKCLUSTER_INDEX = 'https://index.taskcluster.net'
+TASKCLUSTER_INDEX_TMPL = '{}/v1/task/gecko.v2.{}.pushlog-id.{}.decision/artifacts/{}'
+PUSHLOG_TMPL = '{}json-pushes?version=2&startID={}&endID={}'
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+    title='Backfill',
+    name='backfill',
+    symbol='Bk',
+    description='Adds a specific job to previous pushes.',
+    order=0,
+    context=[{}],  # This will be available for all tasks
+    schema={
+        'type': 'object',
+        'properties': {
+            'depth': {
+                'type': 'integer',
+                'default': 5,
+                'minimum': 1,
+                'maximum': 10,
+                'title': 'Search Depth',
+                'description': 'How many pushes back in the tree to trigger this job on.',
+            },
+        },
+        'additionalProperties': False,
+    },
+    available=lambda parameters: parameters.get('project', None) != 'try',
+)
+def backfill_action(parameters, input, task_group_id, task_id, task):
+    """Re-create this task (matched by label) on up to `depth` prior pushes."""
+    pushes = []
+    depth = input.get('depth', 5)
+    end_id = int(parameters['pushlog_id']) - 1
+
+    # Walk the pushlog backwards until we have `depth` pushes or reach push 0.
+    while True:
+        start_id = max(end_id - depth, 0)
+        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
+        r = requests.get(pushlog_url)
+        r.raise_for_status()
+        pushes = pushes + list(r.json()['pushes'].keys())
+        if len(pushes) >= depth or start_id == 0:
+            break
+        end_id = start_id - 1
+    # Push ids are decimal strings; sort numerically before keeping the newest.
+    pushes = sorted(pushes, key=int)[-depth:]
+
+    label = task['metadata']['name']
+
+    for push in pushes:
+        index_url = TASKCLUSTER_INDEX_TMPL.format(TASKCLUSTER_INDEX,
+                                                  parameters['project'],
+                                                  push,
+                                                  'public/full-task-graph.json')
+        r = requests.get(index_url)
+        if r.status_code != requests.codes.ok:
+            if r.status_code == 404:
+                logger.info('Couldn\'t find task-graph on push {}. err: {}'.format(push, r.text))
+                continue
+            else:
+                r.raise_for_status()
+        full_task_graph = r.json()
+        if label in full_task_graph:
+            new_task_definition = clone_and_update_task(full_task_graph[label]['task'], '1d', '30d')
+            logger.info('New task definition: %s', new_task_definition)
+            new_task_id = slugid()
+            create_task(new_task_id, new_task_definition)
+        else:
+            logger.info('Could not find {} on {}. Skipping.'.format(label, push))
--- a/taskcluster/actions/test-retrigger-action.py
+++ b/taskcluster/actions/test-retrigger-action.py
@@ -1,21 +1,15 @@
-import copy
import json
import logging
-import requests
from slugid import nice as slugid
from .registry import register_callback_action
-from taskgraph.create import create_task
-from taskgraph.util.time import (
- current_json_time,
- json_time_from_now
-)
+from .util import (clone_and_update_task, create_task)
TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task"
logger = logging.getLogger(__name__)
@register_callback_action(
name='run-with-options',
@@ -70,26 +64,17 @@ logger = logging.getLogger(__name__)
'description': 'Extra gecko (about:config) preferences to use for this run'
}
},
'additionalProperties': False,
'required': ['path']
}
)
def test_retrigger_action(parameters, input, task_group_id, task_id, task):
- new_task_definition = copy.copy(task)
-
- # set new created, deadline, and expiry fields
- new_task_definition['created'] = current_json_time()
- new_task_definition['deadline'] = json_time_from_now('1d')
- new_task_definition['expires'] = json_time_from_now('30d')
-
- # reset artifact expiry
- for artifact in new_task_definition['payload'].get('artifacts', {}).values():
- artifact['expires'] = new_task_definition['expires']
+ new_task_definition = clone_and_update_task(task, '1d', '30d')
# don't want to run mozharness tests, want a custom mach command instead
new_task_definition['payload']['command'] += ['--no-run-tests']
custom_mach_command = [task['tags']['test-type']]
# mochitests may specify a flavor
if new_task_definition['payload']['env'].get('MOCHITEST_FLAVOR'):
@@ -125,10 +110,9 @@ def test_retrigger_action(parameters, in
# tweak the treeherder symbol
new_task_definition['extra']['treeherder']['symbol'] += '-custom'
logging.info("New task definition: %s", new_task_definition)
# actually create the new task
new_task_id = slugid()
logger.info("Creating new mochitest task with id %s", new_task_id)
- session = requests.Session()
- create_task(session, new_task_id, 'test-retrigger', new_task_definition)
+ create_task(new_task_id, new_task_definition)
--- a/taskcluster/actions/util.py
+++ b/taskcluster/actions/util.py
@@ -1,19 +1,24 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
+import copy
import json
import sys
from taskgraph import create
from taskgraph.util.taskcluster import get_session
+from taskgraph.util.time import (
+ current_json_time,
+ json_time_from_now
+)
# this is set to true for `mach taskgraph action-callback --test`
testing = False
def create_task(task_id, task_def):
"""Create a new task. The task definition will have {relative-datestamp':
'..'} rendered just like in a decision task. Action callbacks should use
@@ -21,8 +26,26 @@ def create_task(task_id, task_def):
allowing easy debugging with `mach taskgraph action-callback --test`."""
if testing:
json.dump([task_id, task_def], sys.stdout,
sort_keys=True, indent=4, separators=(',', ': '))
return
label = task_def['metadata']['name']
session = get_session()
create.create_task(session, task_id, label, task_def)
+
+
+def clone_and_update_task(task, deadline, expires):
+    """Return a copy of `task` with fresh created/deadline/expires fields."""
+    # deepcopy: payload['artifacts'] holds nested dicts that we mutate below;
+    # copy.copy() would leave them shared with (and corrupt) the caller's task.
+    new_task_definition = copy.deepcopy(task)
+    new_task_definition['created'] = current_json_time()
+    new_task_definition['deadline'] = json_time_from_now(deadline)
+    new_task_definition['expires'] = json_time_from_now(expires)
+
+    # Reset artifact expiry; 'artifacts' may be a list or a dict keyed by name.
+    artifacts = new_task_definition['payload'].get('artifacts', [])
+    if isinstance(artifacts, dict):
+        artifacts = artifacts.values()
+    for artifact in artifacts:
+        artifact['expires'] = new_task_definition['expires']
+    return new_task_definition