in Public/Src/Engine/Scheduler/Fingerprints/ObservedInputProcessor.cs [137:791]
internal static async Task<ObservedInputProcessingResult> ProcessInternalAsync<TTarget, TEnv, TObservation>(
OperationContext operationContext,
TEnv environment,
TTarget target,
CacheablePipInfo pip,
ReadOnlyArray<TObservation> observations,
[CanBeNull] IReadOnlyDictionary<AbsolutePath, IReadOnlyCollection<FileArtifactWithAttributes>> unPopulatedSharedOpaqueOutputs,
IReadOnlyCollection<AbsolutePath> createdDirectories,
SortedReadOnlyArray<StringId, CaseInsensitiveStringIdComparer> observedAccessedFileNames,
bool isCacheLookup,
bool trackFileChanges = true)
where TTarget : struct, IObservedInputProcessingTarget<TObservation>
where TEnv : IObservedInputProcessingEnvironment
{
Contract.Requires(!isCacheLookup ^ observedAccessedFileNames.IsValid);
Contract.RequiresNotNull(createdDirectories);
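// When the environment is the concrete scheduler adapter, it exposes additional queries used below
// (e.g. lazy shared-opaque deletion state, output-directory membership, and pip-graph reachability).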
var envAdapter = environment as ObservedInputProcessingEnvironmentAdapter;
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorProcessInternalDuration))
using (var processingState = ObservedInputProcessingState.GetInstance())
{
var pathTable = environment.Context.PathTable;
PathTable.ExpandedAbsolutePathComparer pathComparer = pathTable.ExpandedPathComparer;
ReadOnlyArray<DirectoryArtifact> directoryDependencies = pip.DirectoryDependencies;
int numAbsentPathsEliminated = 0;
var sourceDirectoriesAllDirectories = processingState.SourceDirectoriesAllDirectories;
var sourceDirectoriesTopDirectoryOnly = processingState.SourceDirectoriesTopDirectoryOnly;
var dynamicObservations = processingState.DynamicObservations;
var allowedUndeclaredSourceReads = processingState.AllowedUndeclaredReads;
var directoryDependencyContentsFilePaths = processingState.DirectoryDependencyContentsFilePaths;
var enumeratedDirectories = processingState.EnumeratedDirectories;
var searchPaths = processingState.SearchPaths;
var bitSetSize = BitSet.RoundToValidBitCount(observations.Length);
var observationsUnderSourceSealDirectories = new BitSet(bitSetSize);
observationsUnderSourceSealDirectories.SetLength(bitSetSize);
var outOfSourceSealObservations = new BitSet(bitSetSize);
outOfSourceSealObservations.SetLength(bitSetSize);
HashSet<AbsolutePath> possiblyBadAccesses;
HashSet<HierarchicalNameId> pipFileSystemViewPathIds = null;
var allowUndeclaredSourceReads = pip.UnderlyingPip.ProcessAllowsUndeclaredSourceReads;
// Compute the set of all shared dynamic outputs. This is only done if there is a chance we end up using MinimalGraphWithAlienFiles
// for this pip; otherwise we keep the set empty to avoid unnecessary computation.
HashSet<AbsolutePath> sharedOpaqueOutputs = processingState.SharedOpaqueOutputs;
if (unPopulatedSharedOpaqueOutputs != null && envAdapter?.MayDetermineMinimalGraphWithAlienFiles(allowUndeclaredSourceReads) == true)
{
// We filter out artifacts that are allowed file rewrites since that information is not available
// when processing a prior path set. The final result will be that allowed rewrites, even though outputs,
// will be part of the directory fingerprint when using the minimal graph with alien files. This is the desired outcome
// since those files existed before the build began.
sharedOpaqueOutputs.AddRange(unPopulatedSharedOpaqueOutputs.Values.SelectMany(fileArtifacts =>
fileArtifacts.Where(fa => !fa.IsUndeclaredFileRewrite).Select(fa => fa.Path)));
}
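// Pre-process: collect the contents of all sealed directory dependencies and split the observations into
// those explicitly covered by a dependency, those under a source seal, and the remaining 'possibly bad' accesses.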
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorPreProcessDuration))
{
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorPreProcessListDirectoriesDuration))
{
for (int i = 0; i < directoryDependencies.Length; i++)
{
var directoryDependency = directoryDependencies[i];
var listDirectoryContents = environment.ListSealedDirectoryContents(directoryDependency);
if (!listDirectoryContents.BaseArray.IsValid)
{
// TODO: Debugging for a one-off crash.
Contract.Assume(listDirectoryContents.BaseArray.IsValid, I($"Environment failed to retrieve directory contents for directory:'{directoryDependency.Path.ToString(pathTable)}'. Directory dependency IsSharedOpaque:{directoryDependency.IsSharedOpaque}. PartialSealId:{directoryDependency.PartialSealId}. Base Array was invalid but SortedReadOnlyArray was valid."));
}
if (!listDirectoryContents.IsValid)
{
// TODO: Debugging for a one-off crash that we aren't sure still exists.
Contract.Assume(listDirectoryContents.IsValid, I($"Environment failed to retrieve directory contents for directory:'{directoryDependency.Path.ToString(pathTable)}'. Directory dependency IsSharedOpaque:{directoryDependency.IsSharedOpaque}. PartialSealId:{directoryDependency.PartialSealId} "));
}
directoryDependencyContentsFilePaths.UnionWith(listDirectoryContents.Select(f => f.Path));
if (environment.IsSourceSealedDirectory(directoryDependency, out var allDirectories, out var patterns))
{
if (allDirectories)
{
sourceDirectoriesAllDirectories.Add(new SourceSealWithPatterns(directoryDependency.Path, patterns, false));
}
else
{
sourceDirectoriesTopDirectoryOnly.Add(new SourceSealWithPatterns(directoryDependency.Path, patterns, true));
}
}
}
}
// We have observed path accesses, but need to match them up to file artifacts.
// We can do this with OrdinalPathOnlyFileArtifactComparer and arbitrary write counts
// TODO: This is a very silly conversion. Should re-implement the needed comparer things in a new IBinaryComparer<TLeft, TRight>
// and make a FileArtifact - AbsolutePath comparer.
var observationArtifacts = processingState.ObservationArtifacts;
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorPreProcessValidateSealSourceDuration))
{
for (int i = 0; i < observations.Length; i++)
{
AbsolutePath path = target.GetPathOfObservation(observations[i]);
if (directoryDependencyContentsFilePaths.Contains(path))
{
// Path is explicitly mentioned in a seal directory dependency's contents
// no need to check if it is under a source seal directory
continue;
}
// Check to see if the observation is under a topDirectoryOnly Sealed Source Directories
bool underSealedSource = false;
for (int j = 0; j < sourceDirectoriesTopDirectoryOnly.Count && !underSealedSource; j++)
{
var sourceSealWithPatterns = sourceDirectoriesTopDirectoryOnly[j];
if (sourceSealWithPatterns.Contains(pathTable, path))
{
underSealedSource = true;
observationsUnderSourceSealDirectories.Add(i);
}
else if (path == sourceSealWithPatterns.Path)
{
// Consider the sealed directory itself as a dynamic observation as this likely
// is an enumeration we want to capture for caching
observationsUnderSourceSealDirectories.Add(i);
}
}
// Check to see if the observation is under one of the AllDirectory Sealed Source Directories
for (int j = 0; j < sourceDirectoriesAllDirectories.Count && !underSealedSource; j++)
{
var sourceSealWithPatterns = sourceDirectoriesAllDirectories[j];
if (sourceSealWithPatterns.Contains(pathTable, path))
{
// Note the directories themselves are never part of the seal.
underSealedSource = true;
observationsUnderSourceSealDirectories.Add(i);
}
else if (path == sourceSealWithPatterns.Path)
{
// Consider the sealed directory itself as a dynamic observation as this likely
// is an enumeration we want to capture for caching
observationsUnderSourceSealDirectories.Add(i);
}
}
if (!underSealedSource)
{
observationArtifacts.Add(path);
}
}
}
// Note that we validated the sort order of 'observations' above w.r.t. expanded order, but need
// the fake artifacts sorted ordinally.
// TODO: We can remove this step if ListSealedDirectoryContents is changed to return expansion-sorted results.
possiblyBadAccesses = observationArtifacts;
}
// Processed results.
var observationInfos = new ObservationInfo[observations.Length];
ObservedInput[] observedInputs = new ObservedInput[observations.Length];
ObservedInputProcessingStatus status = ObservedInputProcessingStatus.Success;
int valid = 0;
int invalid = 0;
bool minimalGraphUsed = false;
// Do the processing in 2 passes.
// First pass: obtain paths and start all hashing tasks.
// Second pass: Do the actual processing.
// Having 2 passes allows us to avoid Parallel.ForEach with all required locking.
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorPass1InitializeObservationInfosDuration))
{
for (int i = 0; i < observations.Length; i++)
{
observationInfos[i] = GetObservationInfo(environment, target, observations[i], allowUndeclaredSourceReads);
}
}
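// The classification (content read, probe, enumeration, ...) assigned to each observation by the second pass.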
ObservedInputType[] observationTypes = new ObservedInputType[observations.Length];
// Observations which fail access checks are suppressed (i.e. they do not contribute to
// the fingerprint).
bool[] isUnsuppressedObservation = new bool[observations.Length];
int numAbsentPathProbes = 0, numFileContentReads = 0, numDirectoryEnumerations = 0;
int numExistingDirectoryProbes = 0, numExistingFileProbes = 0;
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorPass2ProcessObservationInfosDuration))
{
// Start the second pass
for (int i = 0; i < observations.Length; i++)
{
TObservation observation = observations[i];
ObservationInfo observationInfo = observationInfos[i];
AbsolutePath path = observationInfo.Path;
ObservationFlags observationFlags = observationInfo.ObservationFlags;
FileArtifact fakeArtifact = FileArtifact.CreateSourceFile(path);
FileContentInfo? pathContentInfo;
using (operationContext.StartOperation(
PipExecutorCounter.ObservedInputProcessorTryQuerySealedInputContentDuration,
fakeArtifact))
{
pathContentInfo = await observationInfos[i].FileContentInfoTask;
}
// TODO: Don't use UntrackedFile for this...
if (pathContentInfo.HasValue && !pathContentInfo.Value.HasKnownLength &&
pathContentInfo.Value.Hash == WellKnownContentHashes.UntrackedFile)
{
// This is a HashSourceFile failure.
var mountInfo = environment.PathExpander.GetSemanticPathInfo(path);
Logger.Log.AbortObservedInputProcessorBecauseFileUntracked(
operationContext,
pip.Description,
path.ToString(pathTable),
mountInfo.RootName.IsValid ?
mountInfo.RootName.ToString(environment.Context.StringTable) :
"N/A");
status = CombineObservedInputProcessingStatus(status, ObservedInputProcessingStatus.Aborted);
invalid++;
continue;
}
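// 'Possibly bad' means the path was neither an explicit member of a directory dependency nor under a source seal;
// it may still be permitted below (e.g. as an allowed undeclared source read) or reclassified.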
bool wasPossiblyBad = possiblyBadAccesses.Contains(path);
if (!allowUndeclaredSourceReads)
{
// We do not hash the files that have been probed, so we skip the following validation for the file probes.
if (!pathContentInfo.HasValue && !wasPossiblyBad && !observationsUnderSourceSealDirectories.Contains(i) && observationFlags.IsHashingRequired())
{
Contract.Assume(
false,
"Observation is either a file or a directory found to be under a seal directory, although the file may not exist physically, " +
"or possibly bad access (probing or reading a file that is not specified as a dependency), or possibly directory enumeration. " +
GetDiagnosticsInfo(path, pip, pathTable, directoryDependencyContentsFilePaths, sourceDirectoriesTopDirectoryOnly, sourceDirectoriesAllDirectories, observationFlags));
}
}
// TODO: Right now we check TryQuerySealedOrUndeclaredInputContent, and then the VFS if that fails (we assume the two are in agreement!)
// Consider combining responsibilities so that the VFS can additionally provide per-file
// state info, such as (Existence: File, State: Sealed, Hash: 123).
// pathContentInfo may be set to AbsentFile for a directory if TreatDirectoryAsAbsentFileOnHashingInputContent flag is specified.
// We need to ensure that we recognize it is a directory so the observed input shows up as a directory enumeration
// We do this by examining the Existence of the pathContentInfo which will be set by the FileContentManager to see if the path represents
// a directory
ObservedInputType type;
if (pathContentInfo.HasValue && pathContentInfo.Value.Existence != PathExistence.ExistsAsDirectory)
{
// Path content info may be an absent file, for example, a sealed directory includes a non-existent file.
type = pathContentInfo.Value.Hash == WellKnownContentHashes.AbsentFile
? ObservedInputType.AbsentPathProbe
: ObservedInputType.FileContentRead;
}
else
{
// We tried to find FileContentInfo for the accessed path, but failed. This means that either 1) the path is not part of a sealed directory,
// but *may* be part of the pip graph (just not sealed), or 2) the path did not require hashing (e.g. a probe). How we proceed here is quite delicate:
// - We make decisions based on 'existence' of the path (including if it is a file or a directory).
// - If the path is known to the pip graph, we might not have materialized it yet.
// - If the path is not known to the pip graph, it might exist in a way that is visible to build processes (e.g. a file not added to a spec).
// Specific rules for how to determine existence are an implementation detail of the IObservedInputProcessingEnvironment defined below.
Possible<PathExistence> maybeType;
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorTryProbeForExistenceDuration, fakeArtifact))
{
maybeType = environment.TryProbeAndTrackForExistence(
path,
pip,
observationFlags,
isReadOnly: observationsUnderSourceSealDirectories.Contains(i),
trackPathExistence: trackFileChanges);
}
if (!maybeType.Succeeded)
{
Logger.Log.ScheduleFileAccessCheckProbeFailed(
operationContext,
pip.Description,
path.ToString(pathTable),
maybeType.Failure.DescribeIncludingInnerFailures());
ObservedInputAccessCheckFailureAction accessCheckFailureResult = target.OnAccessCheckFailure(
observation,
fromTopLevelDirectory: sourceDirectoriesTopDirectoryOnly.Any(a => a.Contains(pathTable, path, isTopDirectoryOnlyOverride: false)));
HandleFailureResult(accessCheckFailureResult, ref status, ref invalid);
continue;
}
else
{
type = MapPathExistenceToObservedInputType(pathTable, path, maybeType.Result, observationFlags);
}
}
observationTypes[i] = type;
if (wasPossiblyBad)
{
if (allowUndeclaredSourceReads)
{
allowedUndeclaredSourceReads.Add(path);
}
// reclassify ExistingFileProbe as AbsentPathProbe if
// - file doesn't actually exist on disk
// - file is under an output directory
// - lazy shared opaque output deletion is disabled
// - the declared producer of the file is a pip downstream of the prober pip
// reason:
// - access was originally classified as ExistingFileProbe despite the file being absent
// because the path is a declared output in the pip graph
// - however, if that path is under an opaque directory and lazy deletion is disabled,
// the file will always be scrubbed before the build, so to this pip it will always be absent.
else if (
type == ObservedInputType.ExistingFileProbe &&
envAdapter?.IsLazySODeletionEnabled == false &&
!FileUtilities.FileExistsNoFollow(path.ToString(pathTable)) &&
envAdapter.IsPathUnderOutputDirectory(path, out var isItSharedOpaque) &&
isItSharedOpaque &&
envAdapter.TryGetFileProducerPip(path, out var producerPipId) &&
envAdapter.IsReachableFrom(from: pip.PipId, to: producerPipId))
{
environment.Counters.IncrementCounter(PipExecutorCounter.ExistingFileProbeReclassifiedAsAbsentForNonExistentSharedOpaqueOutput);
observationTypes[i] = ObservedInputType.AbsentPathProbe;
continue;
}
else if (type == ObservedInputType.FileContentRead || type == ObservedInputType.ExistingFileProbe)
{
ObservedInputAccessCheckFailureAction accessCheckFailureResult = target.OnAccessCheckFailure(
observation,
fromTopLevelDirectory: sourceDirectoriesTopDirectoryOnly.Any(a => a.Contains(pathTable, path, isTopDirectoryOnlyOverride: false)));
HandleFailureResult(accessCheckFailureResult, ref status, ref invalid);
continue;
}
else if (target.IsReportableUnexpectedAccess(path))
{
if (pipFileSystemViewPathIds == null)
{
// Lazily populate pipFileSystemViewPathIds if there is at least one reportable unexpected access.
pipFileSystemViewPathIds = processingState.AllDependencyPathIds;
using (operationContext.StartOperation(OperationCounter.ObservedInputProcessorComputePipFileSystemPaths))
{
foreach (var p in directoryDependencyContentsFilePaths)
{
foreach (var pathId in pathTable.EnumerateHierarchyBottomUp(p.GetParent(pathTable).Value))
{
if (!pipFileSystemViewPathIds.Add(pathId))
{
break;
}
}
}
}
}
if (!pipFileSystemViewPathIds.Contains(path.Value))
{
using (operationContext.StartOperation(OperationCounter.ObservedInputProcessorReportUnexpectedAccess))
{
// Pip does not contain the path in its file system view (as file or directory)
// Report to target but don't fail as only unexpected FileContentRead currently fails input
// processing. This is a policy decision which may be changed at some point. At which point,
// this code would need to be updated.
target.ReportUnexpectedAccess(observation, type);
}
}
}
}
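// The observation passed all access checks; it participates in the search-path computation and the final pass.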
isUnsuppressedObservation[i] = true;
}
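// Derive the set of search paths and a membership filter from the file names the pip actually accessed;
// the filter narrows directory-enumeration fingerprints computed in the third pass.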
DirectoryMembershipFilter searchPathFilter;
using (operationContext.StartOperation(PipExecutorCounter.ObservedInputProcessorComputeSearchPathsAndFilterDuration))
{
searchPathFilter = ComputeSearchPathsAndFilter(
ref observedAccessedFileNames,
environment.Context.PathTable,
pip,
target,
observations,
observationTypes,
isUnsuppressedObservation,
searchPaths);
}
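// Used by CanIgnoreAbsentPathProbe to collapse absent-path probes already covered by an earlier observation.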
AbsolutePath lastAbsentPath = AbsolutePath.Invalid;
// Third and final pass
for (int i = 0; i < observations.Length; i++)
{
if (!isUnsuppressedObservation[i])
{
continue;
}
TObservation observation = observations[i];
AbsolutePath path = observationInfos[i].Path;
var flags = observationInfos[i].ObservationFlags;
// Call GetAwaiter().GetResult() since we awaited above so we know the task has completed successfully
FileContentInfo? pathContentInfo = observationInfos[i].FileContentInfoTask.GetAwaiter().GetResult();
var type = observationTypes[i];
if (type == ObservedInputType.FileContentRead && !pathContentInfo.HasValue)
{
Contract.Assert(false, "If the access is a file content read, then the FileContentInfo cannot be null." +
GetDiagnosticsInfo(path, pip, pathTable, directoryDependencyContentsFilePaths, sourceDirectoriesTopDirectoryOnly, sourceDirectoriesAllDirectories, flags));
}
if (type == ObservedInputType.AbsentPathProbe)
{
// We need to iterate the observations in an order where a directory is visited before the child paths under it.
// CanIgnoreAbsentPathProbe relies on that assumption.
if (CanIgnoreAbsentPathProbe(environment, enumeratedDirectories, pathTable, path, lastAbsentPath, isCacheLookup))
{
numAbsentPathsEliminated++;
continue;
}
}
ObservedInput? maybeProposed;
switch (type)
{
case ObservedInputType.AbsentPathProbe:
maybeProposed = ObservedInput.CreateAbsentPathProbe(path, flags);
if (environment.IsPathUnderOutputDirectory(path)
&& !directoryDependencyContentsFilePaths.Contains(path))
{
// Record that an absent file probe occurred under the root of a known output directory
// We also exclude the probes of paths that we take a dependency on. This is done to cover the case where an upstream
// pip produces an absent file (e.g., it produced and deleted a file), and a consuming pip probes this file.
// The probe will be classified as AbsentFileProbe, and since there is a dependency between these pips, we allow the probe.
dynamicObservations.Add((path, DynamicObservationKind.AbsentPathProbeUnderOutputDirectory));
}
else
{
dynamicObservations.Add((path, DynamicObservationKind.AbsentPathProbeOutsideOutputDirectory));
}
break;
case ObservedInputType.FileContentRead:
if (pathContentInfo.Value.Hash.HashType == HashType.Unknown)
{
throw Contract.AssertFailure($"Unknown content hash for path '{path.ToString(pathTable)}' and operation {type}");
}
maybeProposed = ObservedInput.CreateFileContentRead(path, pathContentInfo.Value.Hash);
dynamicObservations.Add((path, DynamicObservationKind.ObservedFile));
break;
case ObservedInputType.ExistingFileProbe:
maybeProposed = ObservedInput.CreateExistingFileProbe(path);
dynamicObservations.Add((path, DynamicObservationKind.ProbedFile));
break;
case ObservedInputType.ExistingDirectoryProbe:
maybeProposed = ObservedInput.CreateExistingDirectoryProbe(path, flags);
// Directory probe is just like file probe.
dynamicObservations.Add((path, DynamicObservationKind.ProbedFile));
break;
case ObservedInputType.DirectoryEnumeration:
// TODO: TryQueryDirectoryFingerprint should be in agreement with the VirtualFileSystem somehow.
// Right now, both independently make decisions based on path mountpoint.
DirectoryFingerprint? maybeDirectoryFingerprint;
bool isSearchPath = searchPaths.Contains(path);
string enumeratePatternRegex;
DirectoryEnumerationMode mode;
using (operationContext.StartOperation(
PipExecutorCounter.ObservedInputProcessorTryQueryDirectoryFingerprintDuration,
DirectoryArtifact.CreateWithZeroPartialSealId(path)))
{
enumeratePatternRegex = target.GetEnumeratePatternRegex(observation);
DirectoryMembershipFilter directoryFilter;
if (enumeratePatternRegex == null)
{
// If enumeratePatternRegex is null, then isSearchPath must be true. However, that does not hold for unit tests,
// which is why the AllowAllFilter is kept here.
// TODO: Add an assertion here, Assert(isSearchPath).
directoryFilter = isSearchPath ? searchPathFilter : DirectoryMembershipFilter.AllowAllFilter;
}
else
{
var enumeratePatternFilter = RegexDirectoryMembershipFilter.Create(enumeratePatternRegex);
directoryFilter = isSearchPath ? enumeratePatternFilter.Union(searchPathFilter) : enumeratePatternFilter;
}
maybeDirectoryFingerprint = environment.TryQueryDirectoryFingerprint(
path,
pip,
filter: directoryFilter,
isReadOnlyDirectory: observationsUnderSourceSealDirectories.Contains(i),
eventData: new DirectoryMembershipHashedEventData()
{
Directory = path,
IsSearchPath = isSearchPath,
EnumeratePatternRegex = enumeratePatternRegex
},
sharedOpaqueOutputs,
createdDirectories,
environment.State.AlienFileEnumerationCache,
enumerationMode: out mode,
trackPathExistence: trackFileChanges);
if (mode == DirectoryEnumerationMode.MinimalGraph)
{
minimalGraphUsed = true;
}
enumeratedDirectories.Add(path, (directoryFilter, mode));
}
if (maybeDirectoryFingerprint.HasValue)
{
if (maybeDirectoryFingerprint == DirectoryFingerprint.Zero)
{
// We need to normalize 'empty' directories to look 'absent' since the determination of
// directory vs. absent above is based on the *full graph* + real FS, whereas the directory fingerprint
// is computed from the 'existential' VFS (either the pip-scoped FS or the real FS, depending on mount).
// The two can disagree in two cases:
// - A directory is in the global VFS, but not in the existential VFS.
// - A directory is in neither.
// Without some workaround, those two cases would cause oscillation between the Directory and Absent types respectively.
// So, we canonicalize such that a path absent according to the existential FS becomes an absent probe regardless.
// TODO: We accomplish this for now by treating the null fingerprint specially; but this is kind of broken since that might mean "directory exists but empty", which can genuinely occur when looking at the real FS.
maybeProposed = ObservedInput.CreateAbsentPathProbe(
path,
flags,
isSearchPath: isSearchPath,
enumeratePatternRegex: enumeratePatternRegex);
}
else
{
maybeProposed = ObservedInput.CreateDirectoryEnumeration(
path,
maybeDirectoryFingerprint.Value,
isSearchPath: isSearchPath,
enumeratePatternRegex: enumeratePatternRegex);
}
}
else
{
maybeProposed = null;
// TODO: This shouldn't always be an error.
Logger.Log.PipDirectoryMembershipFingerprintingError(
operationContext,
pip.Description,
path.ToString(pathTable));
}
break;
default:
throw Contract.AssertFailure("Unreachable");
}
if (maybeProposed.HasValue)
{
ObservedInput proposed = maybeProposed.Value;
// This no longer has any function other than being a test hook;
target.CheckProposedObservedInput(observation, proposed);
observedInputs[valid++] = proposed;
if (!proposed.Path.IsValid)
{
Contract.Assume(proposed.Path.IsValid, "Created an ObservedInput with an invalid path in ObservedInputProcessor line 660. Type: " + proposed.Type.ToString());
}
switch (proposed.Type)
{
case ObservedInputType.AbsentPathProbe:
numAbsentPathProbes++;
break;
case ObservedInputType.DirectoryEnumeration:
numDirectoryEnumerations++;
break;
case ObservedInputType.ExistingDirectoryProbe:
numExistingDirectoryProbes++;
break;
case ObservedInputType.FileContentRead:
numFileContentReads++;
break;
case ObservedInputType.ExistingFileProbe:
numExistingFileProbes++;
break;
default:
Contract.Assert(false, "Unknown ObservedInputType has been encountered: " + type);
break;
}
}
else
{
status = CombineObservedInputProcessingStatus(status, ObservedInputProcessingStatus.Aborted);
invalid++;
}
}
}
if (minimalGraphUsed)
{
environment.Counters.IncrementCounter(PipExecutorCounter.NumPipsUsingMinimalGraphFileSystem);
}
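// Record every enumerated directory as a dynamic observation as well.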
foreach (var path in enumeratedDirectories.Keys)
{
dynamicObservations.Add((path, DynamicObservationKind.Enumeration));
}
environment.Counters.AddToCounter(PipExecutorCounter.NumAbsentPathsEliminated, numAbsentPathsEliminated);
environment.Counters.AddToCounter(PipExecutorCounter.AbsentPathProbes, numAbsentPathProbes);
environment.Counters.AddToCounter(PipExecutorCounter.DirectoryEnumerations, numDirectoryEnumerations);
environment.Counters.AddToCounter(PipExecutorCounter.ExistingDirectoryProbes, numExistingDirectoryProbes);
environment.Counters.AddToCounter(PipExecutorCounter.FileContentReads, numFileContentReads);
environment.Counters.AddToCounter(PipExecutorCounter.ExistingFileProbes, numExistingFileProbes);
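// Assemble the result. On success, the observed inputs are already in expanded-path order, so they can be wrapped
// into a sorted array without re-sorting.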
if (status == ObservedInputProcessingStatus.Success)
{
Contract.Assume(invalid == 0);
Contract.Assume(valid <= observedInputs.Length);
Contract.Assume(observedAccessedFileNames.IsValid);
if (valid != observedInputs.Length)
{
// We may have valid < observedInputs.Length due to SuppressAndIgnorePath, e.g. due to monitoring allowlists.
Array.Resize(ref observedInputs, valid);
}
// Note that we validated the sort order of 'observations', and 'observedInputs' is in an equivalent order.
return ObservedInputProcessingResult.CreateForSuccess(
observedInputs: SortedReadOnlyArray<ObservedInput, ObservedInputExpandedPathComparer>.FromSortedArrayUnsafe(
ReadOnlyArray<ObservedInput>.FromWithoutCopy(observedInputs),
new ObservedInputExpandedPathComparer(pathComparer)),
observedAccessedFileNames: observedAccessedFileNames,
dynamicObservations: ReadOnlyArray<(AbsolutePath, DynamicObservationKind)>.From(dynamicObservations),
allowedUndeclaredSourceReads: allowedUndeclaredSourceReads.ToReadOnlySet());
}
else
{
Contract.Assume(invalid > 0);
return ObservedInputProcessingResult.CreateForFailure(
status: status,
numberOfValidEntries: valid,
numberOfInvalidEntries: invalid,
dynamicObservations: ReadOnlyArray<(AbsolutePath, DynamicObservationKind)>.From(dynamicObservations),
allowedUndeclaredSourceReads: allowedUndeclaredSourceReads.ToReadOnlySet());
}
}
}