in integration_test/gce-testing-internal/gce/gce_testing.go [896:934]
// UploadContent uploads data from the given io.Reader to remotePath on the
// remote VM, staging it through an object in the transfers GCS bucket.
func UploadContent(ctx context.Context, logger *log.Logger, vm *VM, content io.Reader, remotePath string) (err error) {
	defer func() {
		if err != nil {
			logger.Printf("Uploading file finished with err=%v", err)
		}
	}()
	object := storageClient.Bucket(transfersBucket).Object(path.Join(vm.Name, remotePath))
	writer := object.NewWriter(ctx)
	_, copyErr := io.Copy(writer, content)
	// We have to make sure to call Close() here in order to tell it to finish
	// the upload operation.
	closeErr := writer.Close()
	err = multierr.Combine(copyErr, closeErr)
	if err != nil {
		return fmt.Errorf("UploadContent() could not write data into storage object: %v", err)
	}
	// Make sure to clean up the object once we're done with it.
	// Note: if the preceding io.Copy() or writer.Close() fails, the object will
	// not be uploaded and there is no need to delete it:
	// https://cloud.google.com/storage/docs/resumable-uploads#introduction
	// (note that the go client libraries use resumable uploads).
	defer func() {
		deleteErr := object.Delete(ctx)
		if deleteErr != nil {
			err = fmt.Errorf("UploadContent() finished with err=%v, then cleanup of %v finished with err=%v", err, object.ObjectName(), deleteErr)
		}
	}()
	if IsWindows(vm.ImageSpec) {
		// On Windows, create the destination file and download the staged object
		// with the Read-GcsObject cmdlet from Cloud Tools for PowerShell.
		_, err = RunRemotely(ctx, logger, vm, fmt.Sprintf(`New-Item -Path "%s" -ItemType File -Force ; Read-GcsObject -Force -Bucket "%s" -ObjectName "%s" -OutFile "%s"`, remotePath, object.BucketName(), object.ObjectName(), remotePath))
		return err
	}
	// On Linux, download the staged object with gsutil, installing it first if
	// it is not already present on the VM.
	if err := InstallGsutilIfNeeded(ctx, logger, vm); err != nil {
		return err
	}
	objectPath := fmt.Sprintf("gs://%s/%s", object.BucketName(), object.ObjectName())
	_, err = RunRemotely(ctx, logger, vm, fmt.Sprintf("sudo gsutil cp '%s' '%s'", objectPath, remotePath))
	return err
}
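
A minimal usage sketch (not part of the original file): assuming the surrounding test harness has already produced a *VM and a *log.Logger, uploading a small in-memory config to the instance could look like the helper below. The function name exampleUploadConfig, the config contents, and the remote path /tmp/config.yaml are illustrative only, and the sketch assumes the standard-library "strings" package is imported.

func exampleUploadConfig(ctx context.Context, logger *log.Logger, vm *VM) error {
	// Wrap the config text in an io.Reader; any io.Reader works here.
	content := strings.NewReader("logging:\n  receivers: {}\n")
	// UploadContent stages the data in the transfers bucket and then pulls it
	// onto the VM with Read-GcsObject (Windows) or gsutil (Linux).
	return UploadContent(ctx, logger, vm, content, "/tmp/config.yaml")
}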