in getdeps/fetch.go [86:153]
// fetchAndVerify downloads the content at urlStr (after applying any
// urlOverrides) and verifies or records its hash according to hashMode.
//
// Behavior by URL scheme:
//   - "file" URLs are read from disk relative to projectDir with no hash
//     handling; the file's os.FileInfo is returned alongside the data.
//   - All other URLs are fetched remotely; the returned os.FileInfo is nil.
//
// Hash handling for remote fetches (skipped entirely when hash is nil):
//   - hashModeStrict: *hash must be non-empty, otherwise an error is returned.
//   - hashModeUpdate: any existing *hash is cleared so the freshly computed
//     hash is recorded into *hash instead of being checked.
//   - hashModePermissive: an empty *hash is recorded on fetch, a non-empty
//     one is verified.
func fetchAndVerify(label, projectDir, urlStr string, hashMode HashMode, hash *string, urlOverrides *URLOverrides) ([]byte, os.FileInfo, error) {
	if urlOverrides != nil {
		urlStr = urlOverrides.Override(urlStr)
	}
	u, err := url.Parse(urlStr)
	if err != nil {
		return nil, nil, fmt.Errorf("%s: invalid URL %q: %w", label, urlStr, err)
	}
	if strings.ToLower(u.Scheme) == "file" {
		// NOTE(review): path.Join is slash-only; filepath.Join would be the
		// OS-correct choice — confirm no Windows callers before changing.
		filePath := path.Join(projectDir, u.Host, u.Path)
		fileInfo, err := os.Stat(filePath)
		if err != nil {
			// Wrap with the label so file errors carry the same context as
			// every other error path in this function.
			return nil, nil, fmt.Errorf("%s: %w", label, err)
		}
		// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16
		// (os.ReadFile) — kept in case the rest of the file targets older Go.
		data, err := ioutil.ReadFile(filePath)
		if err != nil {
			return nil, nil, fmt.Errorf("%s: %w", label, err)
		}
		return data, fileInfo, nil
	}
	switch hashMode {
	case hashModeStrict:
		if hash == nil || *hash == "" {
			return nil, nil, fmt.Errorf("%s: %s: hash mode is strict and no hash supplied", label, urlStr)
		}
	case hashModeUpdate:
		// Clearing the stored hash makes the loop below record the freshly
		// computed hash instead of checking against the stale one.
		if hash != nil {
			*hash = ""
		}
	case hashModePermissive:
		// Proceed: verify if a hash is present, record it otherwise.
	}
	var data []byte
	if hash != nil {
		var actualHash string
		// Blindly retry downloading the file when the hash check fails. This
		// works around an odd behaviour of the GNU mirrors where the files
		// are updated but their content is wrong for a few seconds (e.g. the
		// tar.gz file with tar'ed but not gzip'ed content, like it's being
		// compressed in prod).
		for attempts := 0; attempts < 3; attempts++ {
			data, err = fetch(label, urlStr)
			if err != nil {
				// Network/fetch errors are not retried; only hash mismatches.
				return nil, nil, err
			}
			actualHash, err = verifyHash(data, *hash)
			if err != nil {
				log.Printf("Hash validation for %s failed, will try downloading the file again. Error is: %v", label, err)
				continue
			}
			if *hash == "" {
				// No expected hash (permissive first fetch, or update mode):
				// record the computed hash for the caller.
				*hash = actualHash
				log.Printf("%s: Hash %s", label, actualHash)
			} else {
				log.Printf("%s: Hash %s (verified)", label, actualHash)
			}
			return data, nil, nil
		}
		// Reaching this point means every attempt failed hash verification
		// (a successful attempt returns inside the loop), so err is always
		// the last verifyHash error here. Do not hand back unverified data.
		return nil, nil, err
	}
	// No hash to check or record: a plain fetch suffices.
	data, err = fetch(label, urlStr)
	return data, nil, err
}