Bug 1391789 - Consolidate tooltool modifications to shared function; r?dustin
This pattern is repeated a number of times. Let's consolidate it.
A few places set relengapi-proxy=False. AFAICT this was a no-op because
the default value in the worker schema is False. So, these assignments
were dropped.
MozReview-Commit-ID: EoNPBhaMT2J
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -124,8 +124,40 @@ def docker_worker_setup_secrets(config,
taskdesc['worker']['taskcluster-proxy'] = True
secrets = job['run']['secrets']
if secrets is True:
secrets = ['*']
for sec in secrets:
taskdesc['scopes'].append(SECRET_SCOPE.format(
job['treeherder']['kind'], config.params['level'], sec))
+
+
+def docker_worker_add_tooltool(config, job, taskdesc, internal=False):
+    """Give the task access to tooltool.
+
+    Enables the tooltool cache. Adds releng proxy. Configures scopes.
+
+    By default, only public tooltool access will be granted. Access to internal
+    tooltool can be enabled via ``internal=True``.
+    """
+
+    assert job['worker']['implementation'] in ('docker-worker', 'docker-engine')
+
+    taskdesc['worker'].setdefault('caches', []).append({
+        'type': 'persistent',
+        'name': 'tooltool-cache',
+        'mount-point': '/home/worker/tooltool-cache',
+    })
+
+    taskdesc['worker'].setdefault('env', {}).update({
+        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+    })
+
+    taskdesc['worker']['relengapi-proxy'] = True
+    taskdesc['scopes'].extend([
+        'docker-worker:relengapi-proxy:tooltool.download.public',
+    ])
+
+    if internal:
+        taskdesc['scopes'].extend([
+            'docker-worker:relengapi-proxy:tooltool.download.internal',
+        ])
--- a/taskcluster/taskgraph/transforms/job/hazard.py
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -10,16 +10,17 @@ from __future__ import absolute_import,
from taskgraph.util.schema import Schema
from voluptuous import Required, Optional, Any
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import (
docker_worker_add_workspace_cache,
docker_worker_setup_secrets,
docker_worker_add_public_artifacts,
+ docker_worker_add_tooltool,
support_vcs_checkout,
)
haz_run_schema = Schema({
Required('using'): 'hazard',
# The command to run within the task image (passed through to the worker)
Required('command'): basestring,
@@ -37,45 +38,33 @@ haz_run_schema = Schema({
@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
def docker_worker_hazard(config, job, taskdesc):
run = job['run']
worker = taskdesc['worker']
worker['artifacts'] = []
- worker['caches'] = []
docker_worker_add_public_artifacts(config, job, taskdesc)
docker_worker_add_workspace_cache(config, job, taskdesc)
+ docker_worker_add_tooltool(config, job, taskdesc)
docker_worker_setup_secrets(config, job, taskdesc)
support_vcs_checkout(config, job, taskdesc)
env = worker['env']
env.update({
'MOZ_BUILD_DATE': config.params['moz_build_date'],
'MOZ_SCM_LEVEL': config.params['level'],
})
# script parameters
if run.get('mozconfig'):
env['MOZCONFIG'] = run['mozconfig']
- # tooltool downloads
- worker['caches'].append({
- 'type': 'persistent',
- 'name': 'tooltool-cache',
- 'mount-point': '/home/worker/tooltool-cache',
- })
- worker['relengapi-proxy'] = True
- taskdesc['scopes'].extend([
- 'docker-worker:relengapi-proxy:tooltool.download.public',
- ])
- env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
-
# build-haz-linux.sh needs this otherwise it assumes the checkout is in
# the workspace.
env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
worker['command'] = [
'/home/worker/bin/run-task',
'--chown-recursive', '/home/worker/tooltool-cache',
'--chown-recursive', '/home/worker/workspace',
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -17,16 +17,17 @@ from taskgraph.util.schema import Schema
from voluptuous import Required, Optional, Any
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import (
docker_worker_add_workspace_cache,
docker_worker_add_gecko_vcs_env_vars,
docker_worker_setup_secrets,
docker_worker_add_public_artifacts,
+ docker_worker_add_tooltool,
generic_worker_add_public_artifacts,
support_vcs_checkout,
)
COALESCE_KEY = 'builds.{project}.{name}'
mozharness_run_schema = Schema({
Required('using'): 'mozharness',
@@ -110,17 +111,16 @@ def mozharness_on_docker_worker_setup(co
"'use-magic-mh-args' on docker-workers")
# Running via mozharness assumes an image that contains build.sh:
# by default, desktop-build, but it could be another image (like
# android-gradle-build) that "inherits" from desktop-build.
if not taskdesc['worker']['docker-image']:
taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}
- worker['relengapi-proxy'] = False # but maybe enabled for tooltool below
worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
docker_worker_add_public_artifacts(config, job, taskdesc)
docker_worker_add_workspace_cache(config, job, taskdesc,
extra=run.get('extra-workspace-cache-key'))
support_vcs_checkout(config, job, taskdesc)
env = worker.setdefault('env', {})
@@ -153,31 +153,19 @@ def mozharness_on_docker_worker_setup(co
if not run['keep-artifacts']:
env['DIST_TARGET_UPLOADS'] = ''
env['DIST_UPLOADS'] = ''
# Xvfb
if run['need-xvfb']:
env['NEED_XVFB'] = 'true'
- # tooltool downloads
if run['tooltool-downloads']:
- worker['relengapi-proxy'] = True
- worker['caches'].append({
- 'type': 'persistent',
- 'name': 'tooltool-cache',
- 'mount-point': '/home/worker/tooltool-cache',
- })
- taskdesc['scopes'].extend([
- 'docker-worker:relengapi-proxy:tooltool.download.public',
- ])
- if run['tooltool-downloads'] == 'internal':
- taskdesc['scopes'].append(
- 'docker-worker:relengapi-proxy:tooltool.download.internal')
- env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ internal = run['tooltool-downloads'] == 'internal'
+ docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
# Retry if mozharness returns TBPL_RETRY
worker['retry-exit-status'] = 4
docker_worker_setup_secrets(config, job, taskdesc)
command = [
'/home/worker/bin/run-task',
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -8,16 +8,17 @@ from voluptuous import Required
from taskgraph.util.taskcluster import get_artifact_url
from taskgraph.transforms.job import run_job_using
from taskgraph.util.schema import Schema
from taskgraph.transforms.tests import (
test_description_schema,
normpath
)
from taskgraph.transforms.job.common import (
+ docker_worker_add_tooltool,
support_vcs_checkout,
)
import os
BUILDER_NAME_PREFIX = {
'linux64-pgo': 'Ubuntu VM 12.04 x64',
'linux64': 'Ubuntu VM 12.04 x64',
'linux64-nightly': 'Ubuntu VM 12.04 x64',
@@ -143,26 +144,17 @@ def mozharness_test_on_docker(config, jo
env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])
if config.params['project'] == 'try':
env['TRY_COMMIT_MSG'] = config.params['message']
# handle some of the mozharness-specific options
if mozharness['tooltool-downloads']:
- worker['relengapi-proxy'] = True
- worker['caches'].append({
- 'type': 'persistent',
- 'name': 'tooltool-cache',
- 'mount-point': '/home/worker/tooltool-cache',
- })
- taskdesc['scopes'].extend([
- 'docker-worker:relengapi-proxy:tooltool.download.internal',
- 'docker-worker:relengapi-proxy:tooltool.download.public',
- ])
+ docker_worker_add_tooltool(config, job, taskdesc, internal=True)
if test['reboot']:
raise Exception('reboot: {} not supported on generic-worker'.format(test['reboot']))
# assemble the command line
command = [
'/home/worker/bin/run-task',
# The workspace cache/volume is default owned by root:root.
--- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -10,16 +10,17 @@ from __future__ import absolute_import,
from taskgraph.util.schema import Schema
from voluptuous import Required, Any
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import (
docker_worker_add_public_artifacts,
generic_worker_add_public_artifacts,
docker_worker_add_gecko_vcs_env_vars,
+ docker_worker_add_tooltool,
support_vcs_checkout,
)
sm_run_schema = Schema({
Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate'),
# The SPIDERMONKEY_VARIANT
Required('spidermonkey-variant'): basestring,
@@ -41,34 +42,26 @@ def docker_worker_spidermonkey(config, j
worker['caches'].append({
'type': 'persistent',
'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
config.params['level'], config.params['project']),
'mount-point': "/home/worker/workspace",
})
docker_worker_add_public_artifacts(config, job, taskdesc)
+ docker_worker_add_tooltool(config, job, taskdesc)
env = worker.setdefault('env', {})
env.update({
'MOZHARNESS_DISABLE': 'true',
'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
'MOZ_BUILD_DATE': config.params['moz_build_date'],
'MOZ_SCM_LEVEL': config.params['level'],
})
- # tooltool downloads; note that this script downloads using the API
- # endpoiint directly, rather than via relengapi-proxy
- worker['caches'].append({
- 'type': 'persistent',
- 'name': 'tooltool-cache',
- 'mount-point': '/home/worker/tooltool-cache',
- })
- env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
-
support_vcs_checkout(config, job, taskdesc)
script = "build-sm.sh"
if run['using'] == 'spidermonkey-package':
script = "build-sm-package.sh"
elif run['using'] == 'spidermonkey-mozjs-crate':
script = "build-sm-mozjs-crate.sh"
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -12,16 +12,17 @@ import hashlib
from taskgraph.util.schema import Schema
from voluptuous import Optional, Required, Any
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import (
docker_worker_add_tc_vcs_cache,
docker_worker_add_gecko_vcs_env_vars,
docker_worker_add_public_artifacts,
+ docker_worker_add_tooltool,
support_vcs_checkout,
)
from taskgraph.util.hash import hash_paths
from taskgraph import GECKO
TOOLCHAIN_INDEX = 'gecko.cache.level-{level}.toolchains.v1.{name}.{digest}'
@@ -91,52 +92,34 @@ def add_optimizations(config, run, taskd
@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
def docker_worker_toolchain(config, job, taskdesc):
run = job['run']
taskdesc['run-on-projects'] = ['trunk', 'try']
worker = taskdesc['worker']
worker['artifacts'] = []
- worker['caches'] = []
worker['chain-of-trust'] = True
docker_worker_add_public_artifacts(config, job, taskdesc)
docker_worker_add_tc_vcs_cache(config, job, taskdesc)
docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
support_vcs_checkout(config, job, taskdesc)
env = worker['env']
env.update({
'MOZ_BUILD_DATE': config.params['moz_build_date'],
'MOZ_SCM_LEVEL': config.params['level'],
'TOOLS_DISABLE': 'true',
'MOZ_AUTOMATION': '1',
})
- # tooltool downloads. By default we download using the API endpoint, but
- # the job can optionally request relengapi-proxy (for example when downloading
- # internal tooltool resources. So we define the tooltool cache unconditionally.
- worker['caches'].append({
- 'type': 'persistent',
- 'name': 'tooltool-cache',
- 'mount-point': '/home/worker/tooltool-cache',
- })
- env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
-
- # tooltool downloads
- worker['relengapi-proxy'] = False # but maybe enabled for tooltool below
if run['tooltool-downloads']:
- worker['relengapi-proxy'] = True
- taskdesc['scopes'].extend([
- 'docker-worker:relengapi-proxy:tooltool.download.public',
- ])
- if run['tooltool-downloads'] == 'internal':
- taskdesc['scopes'].append(
- 'docker-worker:relengapi-proxy:tooltool.download.internal')
+ internal = run['tooltool-downloads'] == 'internal'
+ docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
worker['command'] = [
'/home/worker/bin/run-task',
# Various caches/volumes are default owned by root:root.
'--chown-recursive', '/home/worker/workspace',
'--chown-recursive', '/home/worker/tooltool-cache',
'--vcs-checkout=/home/worker/workspace/build/src',
'--',