in mozdownload/treeherder.py
def query_builds_by_revision(self, revision, job_type_name='Build', debug_build=False):
    """Retrieve build folders for a given revision with the help of Treeherder.

    :param revision: Revision of the build to download.
    :param job_type_name: Name of the job to look for. For builds it should be one of
        'Build', 'Nightly', or 'L10n Nightly'. Defaults to 'Build'.
    :param debug_build: Download a debug build.
    """
    builds = set()

    try:
        self.logger.info('Querying {url} for list of builds for revision: {revision}'.format(
            url=self.client.server_url, revision=revision))

        # Retrieve the option hash to filter for the type of build (opt and debug for now)
        option_hash = None
        for key, values in self.client.get_option_collection_hash().items():
            for value in values:
                if value['name'] == ('debug' if debug_build else 'opt'):
                    option_hash = key
                    break
            if option_hash:
                break
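
        # Look up the push (formerly called a 'result set' in Treeherder) that was
        # created for the requested revision; jobs are grouped per push.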
        resultsets = self.client.get_pushes(self.branch, revision=revision)

        # Set filters to speed up querying jobs
        kwargs = {
            'option_collection_hash': option_hash,
            'job_type_name': job_type_name,
            'exclusion_profile': False,
        }
        kwargs.update(self.get_treeherder_platform(self.platform))
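
        # For every push, fetch the jobs matching the filters above and collect the
        # log URLs that reference the wanted application; these point at the build folders.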
        for resultset in resultsets:
            kwargs.update({'result_set_id': resultset['id']})
            jobs = self.client.get_jobs(self.branch, **kwargs)
            for job in jobs:
                log_urls = self.client.get_job_log_url(self.branch, job_id=job['id'])
                for log_url in log_urls:
                    if self.application in log_url['url']:
                        self.logger.debug('Found build folder: {}'.format(log_url['url']))
                        builds.update([log_url['url']])

    except Exception:
        self.logger.exception('Failure occurred when querying Treeherder for builds')

    return list(builds)
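
A minimal usage sketch for this method, assuming the surrounding class is mozdownload's Treeherder helper and that it is constructed with the application name, branch, and platform; that constructor signature is an assumption not shown in this excerpt, and the revision string below is a placeholder.

from mozdownload.treeherder import Treeherder

# Assumed constructor arguments: application, branch, and platform.
treeherder = Treeherder('firefox', 'mozilla-central', 'linux64')

# Query candidate build folders for a push; by default an opt 'Build' job type is
# looked up, while a debug build can be requested via debug_build=True.
builds = treeherder.query_builds_by_revision('abcdef123456')  # placeholder revision
for url in builds:
    print(url)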