in src/compiler/compiler.rs [408:660]
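/// Return the color mode (whether colored output was requested) for this
/// invocation.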
fn color_mode(&self) -> ColorMode;
/// Look up a cached compile result in `storage`. If not found, run the
/// compile and store the result.
#[allow(clippy::too_many_arguments)]
async fn get_cached_or_compile(
self: Box<Self>,
service: &server::SccacheService<T>,
dist_client: Option<Arc<dyn dist::Client>>,
creator: T,
storage: Arc<dyn Storage>,
arguments: Vec<OsString>,
cwd: PathBuf,
env_vars: Vec<(OsString, OsString)>,
cache_control: CacheControl,
pool: tokio::runtime::Handle,
) -> Result<(CompileResult, process::Output)> {
let out_pretty = self.output_pretty().into_owned();
debug!("[{}]: get_cached_or_compile: {:?}", out_pretty, arguments);
let start = Instant::now();
let may_dist = dist_client.is_some();
let rewrite_includes_only = match dist_client {
Some(ref client) => client.rewrite_includes_only(),
_ => false,
};
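// Whether distributed compilation is available and the dist client's
// `rewrite_includes_only` setting both feed into hash-key generation below.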
let result = self
.generate_hash_key(
&creator,
cwd.clone(),
env_vars,
may_dist,
&pool,
rewrite_includes_only,
storage.clone(),
cache_control,
)
.await;
debug!(
"[{}]: generate_hash_key took {}",
out_pretty,
fmt_duration_as_secs(&start.elapsed())
);
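// A `ProcessError` carries the output of a failed compiler/preprocessor
// invocation; return that output to the client as an ordinary failed
// compile rather than as an internal sccache error.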
let (key, compilation, weak_toolchain_key) = match result {
Err(e) => {
return match e.downcast::<ProcessError>() {
Ok(ProcessError(output)) => Ok((CompileResult::Error, output)),
Err(e) => Err(e),
};
}
Ok(HashResult {
key,
compilation,
weak_toolchain_key,
}) => (key, compilation, weak_toolchain_key),
};
debug!("[{}]: Hash key: {}", out_pretty, key);
// If caching is disabled (`ForceNoCache`) or a recache is forced
// (`ForceRecache`), skip the storage lookup and report the corresponding
// status instead.
let start = Instant::now();
let cache_status = async {
if cache_control == CacheControl::ForceNoCache {
Ok(Cache::None)
} else if cache_control == CacheControl::ForceRecache {
Ok(Cache::Recache)
} else {
storage.get(&key).await
}
};
// Set a maximum time limit for the cache to respond before we forge
// ahead ourselves with a compilation.
let timeout = Duration::new(60, 0);
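// (60 seconds; if the lookup times out it is treated as a cache miss below.)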
let cache_status = async {
let res = tokio::time::timeout(timeout, cache_status).await;
let duration = start.elapsed();
(res, duration)
};
// Resolve the compiler's expected output files relative to the working
// directory so they can be extracted from, or written into, a cache entry.
let outputs = compilation
.outputs()
.map(|output| FileObjectSource {
path: cwd.join(output.path),
..output
})
.collect::<Vec<_>>();
// Check the result of the cache lookup.
let lookup = match cache_status.await {
(Ok(Ok(Cache::Hit(mut entry))), duration) => {
debug!(
"[{}]: Cache hit in {}",
out_pretty,
fmt_duration_as_secs(&duration)
);
let stdout = entry.get_stdout();
let stderr = entry.get_stderr();
let output = process::Output {
status: exit_status(0),
stdout,
stderr,
};
let hit = CompileResult::CacheHit(duration);
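// Extract the cached object files to their destinations. A decompression
// failure is downgraded to a cache-read-error miss so the build falls
// through to a real compile.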
match entry.extract_objects(outputs.clone(), &pool).await {
Ok(()) => Ok(CacheLookupResult::Success(hit, output)),
Err(e) => {
if e.downcast_ref::<DecompressionFailure>().is_some() {
debug!("[{}]: Failed to decompress object", out_pretty);
Ok(CacheLookupResult::Miss(MissType::CacheReadError))
} else {
Err(e)
}
}
}
}
(Ok(Ok(Cache::Miss)), duration) => {
debug!(
"[{}]: Cache miss in {}",
out_pretty,
fmt_duration_as_secs(&duration)
);
Ok(CacheLookupResult::Miss(MissType::Normal))
}
(Ok(Ok(Cache::None)), duration) => {
debug!(
"[{}]: Cache none in {}",
out_pretty,
fmt_duration_as_secs(&duration)
);
Ok(CacheLookupResult::Miss(MissType::ForcedNoCache))
}
(Ok(Ok(Cache::Recache)), duration) => {
debug!(
"[{}]: Cache recache in {}",
out_pretty,
fmt_duration_as_secs(&duration)
);
Ok(CacheLookupResult::Miss(MissType::ForcedRecache))
}
(Ok(Err(err)), duration) => {
error!(
"[{}]: Cache read error: {:?} in {}",
out_pretty,
err,
fmt_duration_as_secs(&duration)
);
Ok(CacheLookupResult::Miss(MissType::CacheReadError))
}
(Err(_), duration) => {
debug!(
"[{}]: Cache timed out {}",
out_pretty,
fmt_duration_as_secs(&duration)
);
Ok(CacheLookupResult::Miss(MissType::TimedOut))
}
}?;
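// Either return the cached result directly, or compile and (usually)
// store the result.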
match lookup {
CacheLookupResult::Success(compile_result, output) => {
Ok::<_, Error>((compile_result, output))
}
CacheLookupResult::Miss(miss_type) => {
// Cache miss, so compile it.
let start = Instant::now();
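// Compile, either via the distributed client (when configured) or
// locally; `dist_type` records which path was taken.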
let (cacheable, dist_type, compiler_result) = dist_or_local_compile(
service,
dist_client,
creator,
cwd,
compilation,
weak_toolchain_key,
out_pretty.clone(),
)
.await?;
let duration_compilation = start.elapsed();
if !compiler_result.status.success() {
debug!(
"[{}]: Compiled in {}, but failed, not storing in cache",
out_pretty,
fmt_duration_as_secs(&duration_compilation)
);
return Ok((
CompileResult::CompileFailed(dist_type, duration_compilation),
compiler_result,
));
}
if miss_type == MissType::ForcedNoCache {
// Do not cache
debug!(
"[{}]: Compiled in {}, but not caching",
out_pretty,
fmt_duration_as_secs(&duration_compilation)
);
return Ok((
CompileResult::NotCached(dist_type, duration_compilation),
compiler_result,
));
}
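// The argument parser may have marked this compilation as non-cacheable
// (`Cacheable::No`); in that case return the result without writing a
// cache entry.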
if cacheable != Cacheable::Yes {
// Not cacheable
debug!(
"[{}]: Compiled in {}, but not cacheable",
out_pretty,
fmt_duration_as_secs(&duration_compilation)
);
return Ok((
CompileResult::NotCacheable(dist_type, duration_compilation),
compiler_result,
));
}
debug!(
"[{}]: Compiled in {}, storing in cache",
out_pretty,
fmt_duration_as_secs(&duration_compilation)
);
let start_create_artifact = Instant::now();
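// Bundle the compiler's output files, stdout, and stderr into a single
// cache entry.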
let mut entry = CacheWrite::from_objects(outputs, &pool)
.await
.context("failed to zip up compiler outputs")?;
entry.put_stdout(&compiler_result.stdout)?;
entry.put_stderr(&compiler_result.stderr)?;
debug!(
"[{}]: Created cache artifact in {}",
out_pretty,
fmt_duration_as_secs(&start_create_artifact.elapsed())
);
let out_pretty2 = out_pretty.clone();
// Try to finish storing the newly-written cache
// entry. We'll get the result back elsewhere.
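// The storage write runs in the background: the future is handed back
// inside `CompileResult::CacheMiss` so the caller can await and report it
// without delaying the compiler's response.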
let future = async move {
let start = Instant::now();
match storage.put(&key, entry).await {
Ok(_) => {
debug!("[{}]: Stored in cache successfully!", out_pretty2);
Ok(CacheWriteInfo {
object_file_pretty: out_pretty2,
duration: start.elapsed(),
})
}
Err(e) => Err(e),
}
};
let future = Box::pin(future);
Ok((
CompileResult::CacheMiss(miss_type, dist_type, duration_compilation, future),
compiler_result,
))
}
}
.with_context(|| format!("failed to store `{}` to cache", out_pretty))
}