in lib/ramble/spack/stage.py [0:0]
def get_checksums_for_versions(url_dict, name, **kwargs):
    """Fetches and checksums archives from URLs.

    This function is called by both ``spack checksum`` and ``spack
    create``. The ``first_stage_function`` argument allows the caller to
    inspect the first downloaded archive, e.g., to determine the build
    system.

    Args:
        url_dict (dict): A dictionary of the form: version -> URL
        name (str): The name of the package
        first_stage_function (typing.Callable): function that takes a Stage and a URL;
            this is run on the stage of the first URL downloaded
        keep_stage (bool): whether to keep staging area when command completes
        batch (bool): whether to ask user how many versions to fetch (false)
            or fetch all versions (true)
        latest (bool): whether to take the latest version (true) or all (false)
        fetch_options (dict): Options used for the fetcher (such as timeout
            or cookies)

    Returns:
        (str): A multi-line string containing versions and corresponding hashes
    """
    batch = kwargs.get('batch', False)
    fetch_options = kwargs.get('fetch_options', None)
    first_stage_function = kwargs.get('first_stage_function', None)
    keep_stage = kwargs.get('keep_stage', False)
    latest = kwargs.get('latest', False)

    sorted_versions = sorted(url_dict.keys(), reverse=True)
    if latest:
        sorted_versions = sorted_versions[:1]

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v in sorted_versions)
    num_ver = len(sorted_versions)

    tty.msg('Found {0} version{1} of {2}:'.format(
        num_ver, '' if num_ver == 1 else 's', name),
        '',
        *llnl.util.lang.elide_list(
            ['{0:{1}} {2}'.format(str(v), max_len, url_dict[v])
             for v in sorted_versions]))
    print()

    if batch or latest:
        archives_to_fetch = len(sorted_versions)
    else:
        archives_to_fetch = tty.get_number(
            "How many would you like to checksum?", default=1, abort='q')

    if not archives_to_fetch:
        tty.die("Aborted.")

    versions = sorted_versions[:archives_to_fetch]
    urls = [url_dict[v] for v in versions]

    tty.debug('Downloading...')
    # Each entry is (version, sha256, expand) so the expand flag is tied to
    # its own version.  Previously a single ``expand_arg`` variable leaked
    # out of this loop, so the flag computed for the *last* URL was applied
    # to every generated ``version()`` directive.
    version_hashes = []
    # Counts *successful* fetches: first_stage_function must run on the
    # first stage that actually downloads, even if earlier URLs failed.
    i = 0
    errors = []
    for url, version in zip(urls, versions):
        # Wheels should not be expanded during staging
        expand = not (url.endswith('.whl') or '.whl#' in url)
        try:
            if fetch_options:
                url_or_fs = fs.URLFetchStrategy(
                    url, fetch_options=fetch_options)
            else:
                url_or_fs = url
            with Stage(url_or_fs, keep=keep_stage) as stage:
                # Fetch the archive
                stage.fetch()
                if i == 0 and first_stage_function:
                    # Only run first_stage_function the first time,
                    # no need to run it every time
                    first_stage_function(stage, url)

                # Checksum the archive and add it to the list
                version_hashes.append((version, spack.util.crypto.checksum(
                    hashlib.sha256, stage.archive_file), expand))
                i += 1
        except FailedDownloadError:
            errors.append('Failed to fetch {0}'.format(url))
        except Exception as e:
            tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e))

    for msg in errors:
        tty.debug(msg)

    if not version_hashes:
        tty.die("Could not fetch any versions for {0}".format(name))

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v, _, _ in version_hashes)

    # Generate the version directives to put in a package.py, emitting
    # ``expand=False`` only for the versions whose URL is a wheel.
    version_lines = "\n".join([
        " version('{0}', {1}sha256='{2}'{3})".format(
            v, ' ' * (max_len - len(str(v))), h,
            '' if expand else ', expand=False')
        for v, h, expand in version_hashes
    ])

    num_hash = len(version_hashes)
    tty.debug('Checksummed {0} version{1} of {2}:'.format(
        num_hash, '' if num_hash == 1 else 's', name))
    return version_lines