--- a/python/mozbuild/mozbuild/artifacts.py
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -123,48 +123,55 @@ class ArtifactJob(object):
('bin/xpcshell', ('bin', 'bin')),
('bin/plugins/*', ('bin/plugins', 'plugins'))
}
# We can tell our input is a test archive by this suffix, which happens to
# be the same across platforms.
_test_archive_suffix = '.common.tests.zip'
- def __init__(self, package_re, tests_re, log=None):
+ def __init__(self, package_re, tests_re, log=None, download_symbols=False):
self._package_re = re.compile(package_re)
self._tests_re = None
if tests_re:
self._tests_re = re.compile(tests_re)
self._log = log
+ self._symbols_archive_suffix = None
+ if download_symbols:
+ self._symbols_archive_suffix = 'crashreporter-symbols.zip'
def log(self, *args, **kwargs):
if self._log:
self._log(*args, **kwargs)
def find_candidate_artifacts(self, artifacts):
# TODO: Handle multiple artifacts, taking the latest one.
tests_artifact = None
for artifact in artifacts:
name = artifact['name']
if self._package_re and self._package_re.match(name):
yield name
elif self._tests_re and self._tests_re.match(name):
tests_artifact = name
yield name
+ elif self._symbols_archive_suffix and name.endswith(self._symbols_archive_suffix):
+ yield name
else:
self.log(logging.DEBUG, 'artifact',
{'name': name},
'Not yielding artifact named {name} as a candidate artifact')
if self._tests_re and not tests_artifact:
raise ValueError('Expected tests archive matching "{re}", but '
'found none!'.format(re=self._tests_re))
def process_artifact(self, filename, processed_filename):
if filename.endswith(ArtifactJob._test_archive_suffix) and self._tests_re:
return self.process_tests_artifact(filename, processed_filename)
+ if self._symbols_archive_suffix and filename.endswith(self._symbols_archive_suffix):
+ return self.process_symbols_archive(filename, processed_filename)
return self.process_package_artifact(filename, processed_filename)
def process_package_artifact(self, filename, processed_filename):
raise NotImplementedError("Subclasses must specialize process_package_artifact!")
def process_tests_artifact(self, filename, processed_filename):
added_entry = False
@@ -183,16 +190,25 @@ class ArtifactJob(object):
writer.add(destpath.encode('utf-8'), reader[filename], mode=mode)
added_entry = True
if not added_entry:
raise ValueError('Archive format changed! No pattern from "{patterns}"'
'matched an archive path.'.format(
patterns=LinuxArtifactJob.test_artifact_patterns))
+ def process_symbols_archive(self, filename, processed_filename):
+ with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
+ reader = JarReader(filename)
+ for entry in reader.entries:
+ destpath = mozpath.join('crashreporter-symbols', entry)
+ self.log(logging.INFO, 'artifact',
+ {'destpath': destpath},
+ 'Adding {destpath} to processed archive')
+ writer.add(destpath.encode('utf-8'), reader[entry])
class AndroidArtifactJob(ArtifactJob):
package_artifact_patterns = {
'application.ini',
'platform.ini',
'**/*.so',
'**/interfaces.xpt',
@@ -425,19 +441,19 @@ JOB_DETAILS = {
'win64': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
'public/build/firefox-(.*)\.common\.tests\.zip')),
'win64-debug': (WinArtifactJob, ('public/build/firefox-(.*)\.win64.zip',
'public/build/firefox-(.*)\.common\.tests\.zip')),
}
-def get_job_details(job, log=None):
+def get_job_details(job, log=None, download_symbols=False):
cls, (package_re, tests_re) = JOB_DETAILS[job]
- return cls(package_re, tests_re, log=log)
+ return cls(package_re, tests_re, log=log, download_symbols=download_symbols)
def cachedmethod(cachefunc):
'''Decorator to wrap a class or instance method with a memoizing callable that
saves results in a (possibly shared) cache.
'''
def decorator(method):
def wrapper(self, *args, **kwargs):
mapping = cachefunc(self)
@@ -606,19 +622,19 @@ class TaskCache(CacheManager):
'''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
def __init__(self, cache_dir, log=None, skip_cache=False):
CacheManager.__init__(self, cache_dir, 'artifact_url', MAX_CACHED_TASKS, log=log, skip_cache=skip_cache)
self._index = taskcluster.Index()
self._queue = taskcluster.Queue()
@cachedmethod(operator.attrgetter('_cache'))
- def artifact_urls(self, tree, job, rev):
+ def artifact_urls(self, tree, job, rev, download_symbols):
try:
- artifact_job = get_job_details(job, log=self._log)
+ artifact_job = get_job_details(job, log=self._log, download_symbols=download_symbols)
except KeyError:
self.log(logging.INFO, 'artifact',
{'job': job},
'Unknown job {job}')
raise KeyError("Unknown job")
key = '{rev}.{tree}.{job}'.format(rev=rev, tree=tree, job=job)
try:
@@ -743,28 +759,29 @@ class Artifacts(object):
def __init__(self, tree, substs, defines, job=None, log=None,
cache_dir='.', hg=None, git=None, skip_cache=False,
topsrcdir=None):
if (hg and git) or (not hg and not git):
raise ValueError("Must provide path to exactly one of hg and git")
self._substs = substs
+ self._download_symbols = self._substs.get('MOZ_ARTIFACT_BUILD_SYMBOLS', False)
self._defines = defines
self._tree = tree
self._job = job or self._guess_artifact_job()
self._log = log
self._hg = hg
self._git = git
self._cache_dir = cache_dir
self._skip_cache = skip_cache
self._topsrcdir = topsrcdir
try:
- self._artifact_job = get_job_details(self._job, log=self._log)
+ self._artifact_job = get_job_details(self._job, log=self._log, download_symbols=self._download_symbols)
except KeyError:
self.log(logging.INFO, 'artifact',
{'job': self._job},
'Unknown job {job}')
raise KeyError("Unknown job")
self._task_cache = TaskCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log, skip_cache=self._skip_cache)
@@ -909,17 +926,17 @@ class Artifacts(object):
try:
trees = tree_cache.artifact_trees(pushhead)
except ValueError:
return None
trees.sort()
for tree in trees:
try:
- urls = task_cache.artifact_urls(tree, job, pushhead)
+ urls = task_cache.artifact_urls(tree, job, pushhead, self._download_symbols)
except ValueError:
continue
if urls:
self.log(logging.INFO, 'artifact',
{'pushhead': pushhead,
'tree': tree},
'Installing from remote pushhead {pushhead} on {tree}')
return urls