in src/main.rs [2432:2486]
/// Aggregate several remote audit files into one merged audit file.
///
/// Reads a list of URLs from `sub_args.sources` (one per line; blank lines
/// and `#`-prefixed lines are skipped), fetches each audit file over the
/// network, merges them, and writes the formatted TOML result to `out`.
///
/// Errors if the network is unavailable (`--frozen`), the sources file
/// cannot be read, a line fails to parse as a URL, a download fails, or a
/// fetched audit file cannot be parsed/aggregated.
fn cmd_aggregate(
    out: &Arc<dyn Out>,
    cfg: &PartialConfig,
    sub_args: &AggregateArgs,
) -> Result<(), miette::Report> {
    let network =
        Network::acquire(cfg).ok_or_else(|| miette!("cannot aggregate imports when --frozen"))?;

    // Collect the list of audit-file URLs from the sources file.
    let source_urls = {
        let reader = BufReader::new(
            File::open(&sub_args.sources)
                .into_diagnostic()
                .wrap_err("failed to open sources file")?,
        );
        let mut parsed = Vec::new();
        for read_line in reader.lines() {
            let raw = read_line
                .into_diagnostic()
                .wrap_err("failed to read sources file")?;
            let entry = raw.trim();
            // Skip blank lines and comments.
            if entry.is_empty() || entry.starts_with('#') {
                continue;
            }
            let url = Url::parse(entry)
                .into_diagnostic()
                .wrap_err_with(|| format!("failed to parse url: {:?}", entry))?;
            parsed.push(url);
        }
        parsed
    };

    // Download and parse every source concurrently, ticking the progress
    // bar as each one completes (including on error, via the drop guard).
    let fetch_progress = progress_bar("Fetching", "source audits", source_urls.len() as u64);
    let fetches = source_urls.into_iter().map(|url| async {
        let _guard = IncProgressOnDrop(&fetch_progress, 1);
        let url_string = url.to_string();
        let bytes = network.download(url).await?;
        let text = String::from_utf8(bytes).map_err(LoadTomlError::from)?;
        let source_file = SourceFile::new(&url_string, text);
        // Go through the foreign-audit parsing path so that problems are
        // surfaced as warnings here, instead of silently producing an
        // invalid aggregated audit file.
        let audit_file = storage::foreign_audit_source_to_local_warn(&url_string, source_file)?;
        Ok::<_, FetchAuditError>((url_string, audit_file))
    });
    let sources = tokio::runtime::Handle::current()
        .block_on(try_join_all(fetches))
        .into_diagnostic()?;

    // Merge everything and emit the formatted TOML document.
    let merged = do_aggregate_audits(sources).into_diagnostic()?;
    let toml_doc = serialization::to_formatted_toml(merged, None).into_diagnostic()?;
    write!(out, "{}", toml_doc);
    Ok(())
}