in src/storage.rs [2581:2626]
/// Unpacks `tarball` (a gzip-compressed tar archive) into `unpack_dir`,
/// then writes an unpack lock into the freshly unpacked directory.
///
/// Every archive entry must live under the final path component of
/// `unpack_dir` (the crate's `name-version` directory); entries outside
/// that prefix are rejected so one crate's tarball cannot clobber another
/// crate's unpacked sources.
///
/// # Errors
///
/// Returns an [`UnpackError`] if the old directory cannot be removed, the
/// archive cannot be read or iterated, an entry path escapes the expected
/// prefix, an entry fails to unpack, or the unpack lock cannot be created.
///
/// # Panics
///
/// Panics if `unpack_dir` has no final component or no parent directory.
fn unpack_package(tarball: &File, unpack_dir: &Path) -> Result<(), UnpackError> {
    // If the unpack_dir already exists, this implies we had a previously
    // failed fetch; blast it away so we can have a clean slate! Removing
    // unconditionally (rather than checking `exists()` first) avoids a
    // check-then-act race: a directory that disappears in between is fine,
    // so a NotFound error is simply ignored.
    match fs::remove_dir_all(unpack_dir) {
        Ok(()) => {}
        Err(error) if error.kind() == std::io::ErrorKind::NotFound => {}
        Err(error) => return Err(error.into()),
    }
    fs::create_dir(unpack_dir)?;
    let gz = GzDecoder::new(tarball);
    let mut tar = Archive::new(gz);
    let prefix = unpack_dir
        .file_name()
        .expect("unpack_dir should end in the crate's directory name");
    let parent = unpack_dir
        .parent()
        .expect("unpack_dir should live inside a parent directory");
    for entry in tar.entries()? {
        let mut entry = entry.map_err(UnpackError::ArchiveIterate)?;
        let entry_path = entry
            .path()
            .map_err(UnpackError::ArchiveEntry)?
            .into_owned();
        // We're going to unpack this tarball into the global source
        // directory, but we want to make sure that it doesn't accidentally
        // (or maliciously) overwrite source code from other crates. Cargo
        // itself should never generate a tarball that hits this error, and
        // crates.io should also block uploads with these sorts of tarballs,
        // but be extra sure by adding a check here as well.
        if !entry_path.starts_with(prefix) {
            return Err(UnpackError::InvalidPaths {
                entry_path,
                prefix: prefix.to_owned(),
            });
        }
        // Unpack relative to `parent` so the prefix component checked above
        // becomes the on-disk directory; `unpack_in` additionally refuses
        // paths that would escape `parent` (e.g. via `..`).
        entry
            .unpack_in(parent)
            .map_err(|error| UnpackError::Unpack {
                entry_path: entry_path.clone(),
                error,
            })?;
    }
    // Record that the unpack completed successfully so later fetches can
    // tell this directory apart from a partial, failed unpack.
    create_unpack_lock(unpack_dir).map_err(|error| UnpackError::LockCreate {
        target: unpack_dir.to_owned(),
        error,
    })?;
    Ok(())
}