--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -1538,48 +1538,59 @@ class PackageFrontend(MachCommandBase):
@SubCommand('artifact', 'toolchain')
@CommandArgument('--verbose', '-v', action='store_true',
help='Print verbose output.')
@CommandArgument('--cache-dir', metavar='DIR',
help='Directory where to store the artifacts cache')
@CommandArgument('--skip-cache', action='store_true',
help='Skip all local caches to force re-fetching remote artifacts.',
default=False)
+ @CommandArgument('--for-job', metavar='JOB',
+ help='Download the toolchains and tooltool artifacts required by the given job')
@CommandArgument('--from-build', metavar='BUILD', nargs='+',
help='Get toolchains resulting from the given build(s)')
@CommandArgument('--tooltool-manifest', metavar='MANIFEST',
help='Explicit tooltool manifest to process')
@CommandArgument('--authentication-file', metavar='FILE',
help='Use the RelengAPI token found in the given file to authenticate')
@CommandArgument('--tooltool-url', metavar='URL',
help='Use the given url as tooltool server')
@CommandArgument('--no-unpack', action='store_true',
help='Do not unpack any downloaded file')
@CommandArgument('--retry', type=int, default=0,
help='Number of times to retry failed downloads')
@CommandArgument('files', nargs='*',
help='Only download the given file names (you may use file name stems)')
def artifact_toolchain(self, verbose=False, cache_dir=None,
- skip_cache=False, from_build=(),
+ skip_cache=False, for_job=None, from_build=(),
tooltool_manifest=None, authentication_file=None,
tooltool_url=None, no_unpack=False, retry=None,
files=()):
'''Download, cache and install pre-built toolchains.
'''
from mozbuild.artifacts import ArtifactCache
from mozbuild.action.tooltool import (
FileRecord,
open_manifest,
unpack_file,
)
from requests.adapters import HTTPAdapter
import redo
import requests
import shutil
+ # Normally, we'd add this path via virtualenv_packages.txt, but that causes
+ # problems because of mozharness's embedded (and different) copies of modules
+ # that also exist elsewhere in the tree (e.g. mozinfo). Ideally, we wouldn't
+ # use mozharness at all, but until the tooltool manifests can be found in
+ # taskcluster configs instead of mozharness configs (bug 1356952), we have to
+ # use it. Ugly, but it does the job for now.
+ sys.path.append(os.path.join(self.topsrcdir, 'testing', 'mozharness'))
+
+ from mozharness.base.config import parse_config_file
from taskgraph.generator import Kind
from taskgraph.optimize import optimize_task
from taskgraph.util.taskcluster import (
get_artifact_url,
list_artifacts,
)
import yaml
@@ -1632,28 +1643,21 @@ class PackageFrontend(MachCommandBase):
def validate(self):
if self.size is None and self.digest is None:
return True
return super(DownloadRecord, self).validate()
records = OrderedDict()
downloaded = []
- if tooltool_manifest:
- manifest = open_manifest(tooltool_manifest)
- for record in manifest.file_records:
- url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
- record.digest)
- records[record.filename] = DownloadRecord(
- url, record.filename, record.size, record.digest,
- record.algorithm, unpack=record.unpack,
- version=record.version, visibility=record.visibility,
- setup=record.setup)
+ from_build = list(from_build)  # mutable copy; --for-job appends to it below
+ files_from_build = {}
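+ # Manifests to process: the explicit --tooltool-manifest, if given, plus any
+ # discovered below via --for-job.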
+ tooltool_manifests = [tooltool_manifest] if tooltool_manifest else []
- if from_build:
+ if from_build or for_job:
params = {
'message': '',
'project': '',
'level': os.environ.get('MOZ_SCM_LEVEL', '3'),
'base_repository': '',
'head_repository': '',
'head_rev': '',
'moz_build_date': '',
@@ -1662,66 +1666,119 @@ class PackageFrontend(MachCommandBase):
'owner': '',
}
# TODO: move to the taskcluster package
def tasks(kind):
kind_path = mozpath.join(self.topsrcdir, 'taskcluster', 'ci', kind)
with open(mozpath.join(kind_path, 'kind.yml')) as f:
config = yaml.load(f)
- tasks = Kind(kind, kind_path, config).load_tasks(params, {})
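+ # Kinds can declare kind-dependencies; load the tasks from those kinds
+ # first so load_tasks() can resolve cross-kind dependencies.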
+ loaded_tasks = []
+ for dep in config.get('kind-dependencies', ()):
+ loaded_tasks.extend(tasks(dep).itervalues())
return {
task.task['metadata']['name']: task
- for task in tasks
+ for task in Kind(kind, kind_path, config).load_tasks(
+ params, loaded_tasks)
}
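+
+ # Expand a short name like 'linux64' into a full task name such as
+ # '<kind>-linux64/opt'.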
+ def normalize_task_name(name, kind):
+ if '/' not in name:
+ name = '{}/opt'.format(name)
+
+ if not name.startswith('{}-'.format(kind)):
+ name = '{}-{}'.format(kind, name)
+
+ return name
+
toolchains = tasks('toolchain')
- for b in from_build:
- user_value = b
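+ # With --for-job, find the task for the given job so its tooltool
+ # manifest(s) and toolchain dependencies can be used.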
+ if for_job:
+ task = None
- if '/' not in b:
- b = '{}/opt'.format(b)
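+ # The job's kind is inferred from its name prefix (e.g. 'build-', 'test-').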
+ for kind in os.listdir(os.path.join(
+ self.topsrcdir, 'taskcluster', 'ci')):
+ if for_job.startswith('{}-'.format(kind)):
+ jobs = tasks(kind)
+ j = normalize_task_name(for_job, kind)
+ task = jobs.get(j)
+ break
+
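+ # If no kind prefix matched, fall back to looking the job up among the
+ # build tasks.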
+ if not task:
+ j = normalize_task_name(for_job, 'build')
+ builds = tasks('build')
+ task = builds.get(j)
- if not b.startswith('toolchain-'):
- b = 'toolchain-{}'.format(b)
+ if not task:
+ self.log(logging.ERROR, 'artifact', {'job': for_job},
+ 'Could not find a job named `{job}`')
+ return 1
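+ # Collect the tooltool manifests referenced by the job's mozharness
+ # config(s).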
+ env = task.task.get('payload', {}).get('env', {})
+ mozharness_configs = env.get('MOZHARNESS_CONFIG', '')
+ for c in mozharness_configs.split():
+ config = parse_config_file(os.path.join(
+ self.topsrcdir, 'testing', 'mozharness', 'configs', c))
+ m = config.get('tooltool_manifest_src')
+ if m:
+ tooltool_manifests.append(os.path.join(
+ self.topsrcdir, m))
- task = toolchains.get(b)
- if not task:
- self.log(logging.ERROR, 'artifact', {'build': user_value},
- 'Could not find a toolchain build named `{build}`')
- return 1
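+ # Add the job's toolchain dependencies as if they had been passed via
+ # --from-build.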
+ for d in task.dependencies.itervalues():
+ if d in toolchains:
+ from_build.append(d)
- optimized, task_id = optimize_task(task, {})
- if not optimized:
- self.log(logging.ERROR, 'artifact', {'build': user_value},
- 'Could not find artifacts for a toolchain build '
- 'named `{build}`')
- return 1
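+ # Turn every entry from the collected tooltool manifests into a
+ # download record.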
+ for tooltool_manifest in tooltool_manifests:
+ manifest = open_manifest(tooltool_manifest)
+ for record in manifest.file_records:
+ url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
+ record.digest)
+ records[record.filename] = DownloadRecord(
+ url, record.filename, record.size, record.digest,
+ record.algorithm, unpack=record.unpack,
+ version=record.version, visibility=record.visibility,
+ setup=record.setup)
+
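+ # Resolve each requested toolchain build to its task and queue its
+ # artifacts for download.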
+ for b in from_build:
+ user_value = b
+ b = normalize_task_name(b, 'toolchain')
- for artifact in list_artifacts(task_id):
- name = artifact['name']
- if not name.startswith('public/'):
- continue
- name = name[len('public/'):]
- if name.startswith('logs/'):
- continue
- name = os.path.basename(name)
- records[name] = DownloadRecord(
- get_artifact_url(task_id, artifact['name']),
- name, None, None, None, unpack=True)
+ task = toolchains.get(b)
+ if not task:
+ self.log(logging.ERROR, 'artifact', {'build': user_value},
+ 'Could not find a toolchain build named `{build}`')
+ return 1
+
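+ # optimize_task() substitutes an existing task id whose artifacts can be
+ # reused; if there is none, there is nothing to download.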
+ optimized, task_id = optimize_task(task, {})
+ if not optimized:
+ self.log(logging.ERROR, 'artifact', {'build': user_value},
+ 'Could not find artifacts for a toolchain build '
+ 'named `{build}`')
+ return 1
+
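+ # Queue every public artifact from that task, skipping logs.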
+ for artifact in list_artifacts(task_id):
+ name = artifact['name']
+ if not name.startswith('public/'):
+ continue
+ name = name[len('public/'):]
+ if name.startswith('logs/'):
+ continue
+ name = os.path.basename(name)
+ records[name] = DownloadRecord(
+ get_artifact_url(task_id, artifact['name']),
+ name, None, None, None, unpack=True)
for record in records.itervalues():
if files and not any(record.basename == f or
record.basename.startswith('%s.' % f)
for f in files):
continue
- self.log(logging.INFO, 'artifact', {'name': record.basename},
- 'Downloading {name}')
+ self.log(logging.INFO, 'artifact', {'name': record.basename,
+ 'url': record.url},
+ 'Downloading {name} from {url}')
valid = False
# sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
for attempt, _ in enumerate(redo.retrier(attempts=retry+1,
sleeptime=60)):
try:
record.fetch_with(cache)
except requests.exceptions.HTTPError as e:
status = e.response.status_code