--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -1776,21 +1776,18 @@ class PackageFrontend(MachCommandBase):
open_manifest,
unpack_file,
)
from requests.adapters import HTTPAdapter
import redo
import requests
import shutil
- from taskgraph.generator import Kind
- from taskgraph.optimize import optimize_task
from taskgraph.util.taskcluster import (
get_artifact_url,
- list_artifacts,
)
import yaml
self._set_log_level(verbose)
# Normally, we'd use self.log_manager.enable_unstructured(),
# but that enables all logging, while we only really want tooltool's
# and it also makes structured log output twice.
# So we manually do what it does, and limit that to the tooltool
@@ -1876,63 +1873,48 @@ class PackageFrontend(MachCommandBase):
record.digest)
records[record.filename] = DownloadRecord(
url, record.filename, record.size, record.digest,
record.algorithm, unpack=record.unpack,
version=record.version, visibility=record.visibility,
setup=record.setup)
if from_build:
- params = {
- 'message': '',
- 'project': '',
- 'level': os.environ.get('MOZ_SCM_LEVEL', '3'),
- 'base_repository': '',
- 'head_repository': '',
- 'head_rev': '',
- 'moz_build_date': '',
- 'build_date': 0,
- 'pushlog_id': 0,
- 'owner': '',
- }
+ from taskgraph.util.taskcluster import find_task_id, get_artifact
+            # find the latest m-c decision task and use its mapping of toolchain labels to artifacts
+ decision_task_id = find_task_id('gecko.v2.mozilla-central.latest.firefox.decision')
+ full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
+ label_to_taskid = get_artifact(decision_task_id, "public/label-to-taskid.json")
- # TODO: move to the taskcluster package
- def tasks(kind):
- kind_path = mozpath.join(self.topsrcdir, 'taskcluster', 'ci', kind)
- with open(mozpath.join(kind_path, 'kind.yml')) as f:
- config = yaml.load(f)
- tasks = Kind(kind, kind_path, config).load_tasks(params, {})
- return {
- task.task['metadata']['name']: task
- for task in tasks
- }
-
- toolchains = tasks('toolchain')
+ toolchains = {l: t for l, t in full_task_graph.iteritems()
+ if t['attributes']['kind'] == 'toolchain'}
aliases = {}
- for t in toolchains.values():
- alias = t.attributes.get('toolchain-alias')
+ for l, t in toolchains.items():
+ alias = t['attributes'].get('toolchain-alias')
if alias:
- aliases['toolchain-{}'.format(alias)] = \
- t.task['metadata']['name']
+ aliases['toolchain-{}'.format(alias)] = l
for b in from_build:
user_value = b
if not b.startswith('toolchain-'):
b = 'toolchain-{}'.format(b)
task = toolchains.get(aliases.get(b, b))
if not task:
self.log(logging.ERROR, 'artifact', {'build': user_value},
'Could not find a toolchain build named `{build}`')
return 1
- task_id = optimize_task(task, {})
- artifact_name = task.attributes.get('toolchain-artifact')
+ # NOTE: if all tasks that used this toolchain were optimized away for this
+ # particular push, then the artifact task will not be found. See bug 1397847
+ # for a better approach.
+ task_id = label_to_taskid.get(task['label'])
+ artifact_name = task['attributes'].get('toolchain-artifact')
if task_id in (True, False) or not artifact_name:
self.log(logging.ERROR, 'artifact', {'build': user_value},
'Could not find artifacts for a toolchain build '
'named `{build}`')
return 1
record = ArtifactRecord(task_id, artifact_name)
records[record.filename] = record