in GVFS/GVFS.Common/Prefetch/BlobPrefetcher.cs [215:389]
public void PrefetchWithStats(
    string branchOrCommit,
    bool isBranch,
    bool hydrateFilesAfterDownload,
    out int matchedBlobCount,
    out int downloadedBlobCount,
    out int hydratedFileCount)
{
    matchedBlobCount = 0;
    downloadedBlobCount = 0;
    hydratedFileCount = 0;

    if (string.IsNullOrWhiteSpace(branchOrCommit))
    {
        throw new FetchException("Must specify branch or commit to fetch");
    }

    GitRefs refs = null;
    string commitToFetch;
    if (isBranch)
    {
        refs = this.ObjectRequestor.QueryInfoRefs(branchOrCommit);
        if (refs == null)
        {
            throw new FetchException("Could not query info/refs from: {0}", this.Enlistment.RepoUrl);
        }
        else if (refs.Count == 0)
        {
            throw new FetchException("Could not find branch {0} in info/refs from: {1}", branchOrCommit, this.Enlistment.RepoUrl);
        }

        commitToFetch = refs.GetTipCommitId(branchOrCommit);
    }
    else
    {
        commitToFetch = branchOrCommit;
    }
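
    // Make sure the target commit object is present locally before diffing against it.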
    this.DownloadMissingCommit(commitToFetch, this.GitObjects);

    // For FastFetch only, examine the shallow file to determine the previous commit that had been fetched
    string shallowFile = Path.Combine(this.Enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Shallow);

    string previousCommit = null;

    // Use the shallow file to find a recent commit to diff against to try and reduce the number of SHAs to check.
    if (File.Exists(shallowFile))
    {
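        // The shallow file lists one commit SHA per line; the last non-empty line is treated as the most recent prefetch point.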
        previousCommit = File.ReadAllLines(shallowFile).Where(line => !string.IsNullOrWhiteSpace(line)).LastOrDefault();
        if (string.IsNullOrWhiteSpace(previousCommit))
        {
            this.Tracer.RelatedError("Shallow file exists, but contains no valid SHAs.");
            this.HasFailures = true;
            return;
        }
    }
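
    // Shared, thread-safe collection of blob ids that are known to be available locally.
    // blobFinder, downloader, and packIndexer all add to it; fileHydrator consumes it.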
    BlockingCollection<string> availableBlobs = new BlockingCollection<string>();

    ////
    // First create the pipeline
    //
    //  diff ---> blobFinder ---> downloader ---> packIndexer
    //   |            |               |                |
    //   ----------------------------------------------> fileHydrator
    ////

    // diff
    // Inputs:
    //    * files/folders
    //    * commit id
    // Outputs:
    //    * RequiredBlobs (property): Blob ids required to satisfy desired paths
    //    * FileAddOperations (property): Repo-relative paths corresponding to those blob ids
    DiffHelper diff = new DiffHelper(this.Tracer, this.Enlistment, this.FileList, this.FolderList, includeSymLinks: false);

    // blobFinder
    // Inputs:
    //    * requiredBlobs (in param): Blob ids from output of `diff`
    // Outputs:
    //    * availableBlobs (out param): Locally available blob ids (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    //    * MissingBlobs (property): Blob ids that are missing and need to be downloaded
    //    * AvailableBlobs (property): Same as availableBlobs
    FindBlobsStage blobFinder = new FindBlobsStage(this.SearchThreadCount, diff.RequiredBlobs, availableBlobs, this.Tracer, this.Enlistment);

    // downloader
    // Inputs:
    //    * missingBlobs (in param): Blob ids from output of `blobFinder`
    // Outputs:
    //    * availableBlobs (out param): Loose objects that have completed downloading (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    //    * AvailableObjects (property): Same as availableBlobs
    //    * AvailablePacks (property): Packfiles that have completed downloading
    BatchObjectDownloadStage downloader = new BatchObjectDownloadStage(this.DownloadThreadCount, this.ChunkSize, blobFinder.MissingBlobs, availableBlobs, this.Tracer, this.Enlistment, this.ObjectRequestor, this.GitObjects);

    // packIndexer
    // Inputs:
    //    * availablePacks (in param): Packfiles that have completed downloading from output of `downloader`
    // Outputs:
    //    * availableBlobs (out param): Blobs that have completed downloading and indexing (shared between `blobFinder`, `downloader`, and `packIndexer`, all add blob ids to the list as they are locally available)
    IndexPackStage packIndexer = new IndexPackStage(this.IndexThreadCount, downloader.AvailablePacks, availableBlobs, this.Tracer, this.GitObjects);

    // fileHydrator
    // Inputs:
    //    * workingDirectoryRoot (in param): the root of the working directory where hydration takes place
    //    * blobIdsToPaths (in param): paths of all blob ids that need to be hydrated from output of `diff`
    //    * availableBlobs (in param): blob ids that are available locally, from whatever source
    // Outputs:
    //    * Hydrated files on disk.
    HydrateFilesStage fileHydrator = new HydrateFilesStage(Environment.ProcessorCount * 2, this.Enlistment.WorkingDirectoryRoot, diff.FileAddOperations, availableBlobs, this.Tracer);

    // All the stages of the pipeline are created and wired up, now kick them off in the proper sequence
    ThreadStart performDiff = () =>
    {
        diff.PerformDiff(previousCommit, commitToFetch);
        this.HasFailures |= diff.HasFailures;
    };

    if (hydrateFilesAfterDownload)
    {
        // Call synchronously to ensure that diff.FileAddOperations
        // is completely populated when fileHydrator starts
        performDiff();
    }
    else
    {
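        // Otherwise the diff can run concurrently with the search and download stages started below.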
        new Thread(performDiff).Start();
    }

    blobFinder.Start();
    downloader.Start();
    if (hydrateFilesAfterDownload)
    {
        fileHydrator.Start();
    }

    // If indexing happens during searching, searching progressively gets slower, so wait on searching before indexing.
    blobFinder.WaitForCompletion();
    this.HasFailures |= blobFinder.HasFailures;

    packIndexer.Start();

    downloader.WaitForCompletion();
    this.HasFailures |= downloader.HasFailures;

    packIndexer.WaitForCompletion();
    this.HasFailures |= packIndexer.HasFailures;
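
    // No more blob ids will be produced; this unblocks fileHydrator once it drains the collection.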
    availableBlobs.CompleteAdding();

    if (hydrateFilesAfterDownload)
    {
        fileHydrator.WaitForCompletion();
        this.HasFailures |= fileHydrator.HasFailures;
    }
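
    // Matched blobs are all blobs the diff required, whether already local or newly downloaded;
    // every blob that blobFinder reported missing was downloaded above.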
    matchedBlobCount = blobFinder.AvailableBlobCount + blobFinder.MissingBlobCount;
    downloadedBlobCount = blobFinder.MissingBlobCount;
    hydratedFileCount = fileHydrator.ReadFileCount;
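
    // Only update refs and the refspec once everything above succeeded, and only when config updates are not skipped.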
    if (!this.SkipConfigUpdate && !this.HasFailures)
    {
        this.UpdateRefs(branchOrCommit, isBranch, refs);

        if (isBranch)
        {
            this.HasFailures |= !this.UpdateRefSpec(this.Tracer, this.Enlistment, branchOrCommit, refs);
        }
    }

    if (!this.HasFailures)
    {
        this.SavePrefetchArgs(commitToFetch, hydrateFilesAfterDownload);
    }
}
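
// A minimal calling sketch (hypothetical caller; construction of the BlobPrefetcher instance and tracer setup are assumed):
//
//     prefetcher.PrefetchWithStats(
//         "main",
//         isBranch: true,
//         hydrateFilesAfterDownload: true,
//         out int matchedBlobCount,
//         out int downloadedBlobCount,
//         out int hydratedFileCount);
//
//     Console.WriteLine($"Matched: {matchedBlobCount}, Downloaded: {downloadedBlobCount}, Hydrated: {hydratedFileCount}");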