in src/Agent.Worker/TestResults/Legacy/LegacyTestRunDataPublisher.cs [91:233]
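/// <summary>
/// Reads results from each file, merges them into a single test run, publishes that run,
/// and returns whether the merged outcome is failed (after the optional flaky-aware check).
/// </summary>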
private async Task<bool> PublishAllTestResultsToSingleTestRunAsync(List<string> resultFiles, ITestRunPublisher publisher, TestRunContext runContext, string resultReader, string runTitle, int? buildId, CancellationToken cancellationToken)
{
    bool isTestRunOutcomeFailed = false;
    try
    {
        // Capture the current time (UTC) up front as a fallback start date for when
        // the result files carry no valid dates.
        DateTime minStartDate = DateTime.MaxValue;
        DateTime maxCompleteDate = DateTime.MinValue;
        DateTime presentTime = DateTime.UtcNow;
        bool dateFormatError = false;
        TimeSpan totalTestCaseDuration = TimeSpan.Zero;
        List<string> runAttachments = new List<string>();
        List<TestCaseResultData> runResults = new List<TestCaseResultData>();
        TestRunSummary testRunSummary = new TestRunSummary();

        // Read results from each file.
        foreach (string resultFile in resultFiles)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Test case results.
            _executionContext.Debug(StringUtil.Format("Reading test results from file '{0}'", resultFile));
            TestRunData resultFileRunData = publisher.ReadResultsFromFile(runContext, resultFile);
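
            // GetTestRunOutcome updates testRunSummary with this file's counts; it is evaluated
            // before the || so a failure in an earlier file does not short-circuit the update.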
            isTestRunOutcomeFailed = GetTestRunOutcome(resultFileRunData, testRunSummary) || isTestRunOutcomeFailed;
            if (resultFileRunData != null)
            {
                if (resultFileRunData.Results != null && resultFileRunData.Results.Length > 0)
                {
                    try
                    {
                        if (string.IsNullOrEmpty(resultFileRunData.StartDate) || string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                        {
                            dateFormatError = true;
                        }

                        // As per discussion with Manoj (refer bug 565487): when merging, the test run
                        // should span from the minimum start time to the maximum completed time.
                        if (!string.IsNullOrEmpty(resultFileRunData.StartDate))
                        {
                            DateTime startDate = DateTime.Parse(resultFileRunData.StartDate, null, DateTimeStyles.RoundtripKind);
                            minStartDate = minStartDate > startDate ? startDate : minStartDate;

                            if (!string.IsNullOrEmpty(resultFileRunData.CompleteDate))
                            {
                                DateTime endDate = DateTime.Parse(resultFileRunData.CompleteDate, null, DateTimeStyles.RoundtripKind);
                                maxCompleteDate = maxCompleteDate < endDate ? endDate : maxCompleteDate;
                            }
                        }
                    }
                    catch (FormatException)
                    {
                        _executionContext.Warning(StringUtil.Loc("InvalidDateFormat", resultFile, resultFileRunData.StartDate, resultFileRunData.CompleteDate));
                        dateFormatError = true;
                    }

                    // Keep accumulating duration as a fallback for when the dates are missing or malformed.
                    foreach (TestCaseResultData tcResult in resultFileRunData.Results)
                    {
                        // Sum durations directly; converting to int first could overflow for long-running suites.
                        totalTestCaseDuration = totalTestCaseDuration.Add(TimeSpan.FromMilliseconds(tcResult.DurationInMs));
                    }
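
                    // Merge this file's results into the combined run.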
                    runResults.AddRange(resultFileRunData.Results);

                    // Run attachments.
                    if (resultFileRunData.Attachments != null)
                    {
                        runAttachments.AddRange(resultFileRunData.Attachments);
                    }
                }
                else
                {
                    _executionContext.Output(StringUtil.Loc("NoResultFound", resultFile));
                }
            }
            else
            {
                _executionContext.Warning(StringUtil.Loc("InvalidResultFiles", resultFile, resultReader));
            }
        }

        // Publish a run only if there are results.
        if (runResults.Count > 0)
        {
            string runName = string.IsNullOrWhiteSpace(runTitle)
                ? StringUtil.Format("{0}_TestResults_{1}", _resultReader.Name, buildId)
                : runTitle;
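
            // Sanity check: a merged start time later than the merged complete time means the
            // parsed dates are unreliable.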
            if (DateTime.Compare(minStartDate, maxCompleteDate) > 0)
            {
                _executionContext.Warning(StringUtil.Loc("InvalidCompletedDate", maxCompleteDate, minStartDate));
                dateFormatError = true;
            }
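
            // Fall back when the dates are unusable: default the start time to "now" and derive
            // the completion time from the summed test case durations.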
            minStartDate = minStartDate == DateTime.MaxValue ? presentTime : minStartDate;
            maxCompleteDate = dateFormatError || maxCompleteDate == DateTime.MinValue ? minStartDate.Add(totalTestCaseDuration) : maxCompleteDate;

            // Create the test run.
            TestRunData testRunData = new TestRunData(
                name: runName,
                startedDate: minStartDate.ToString("o"),
                completedDate: maxCompleteDate.ToString("o"),
                state: "InProgress",
                isAutomated: true,
                buildId: runContext != null ? runContext.BuildId : 0,
                buildFlavor: runContext != null ? runContext.Configuration : string.Empty,
                buildPlatform: runContext != null ? runContext.Platform : string.Empty,
                releaseUri: runContext != null ? runContext.ReleaseUri : null,
                releaseEnvironmentUri: runContext != null ? runContext.ReleaseEnvironmentUri : null
            );
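
            // Carry pipeline metadata, attachments, and custom fields over to the merged run.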
            // runContext may be null (see the guards above), so use null-conditional access.
            testRunData.PipelineReference = runContext?.PipelineReference;
            testRunData.Attachments = runAttachments.ToArray();
            testRunData.AddCustomField(_testRunSystemCustomFieldName, runContext?.TestRunSystem);
            AddTargetBranchInfoToRunCreateModel(testRunData, runContext?.TargetBranchName);
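
            // Publish: start the run, add all merged results, then close it.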
            TestRun testRun = await publisher.StartTestRunAsync(testRunData, _executionContext.CancellationToken);
            await publisher.AddResultsAsync(testRun, runResults.ToArray(), _executionContext.CancellationToken);
            TestRun updatedRun = await publisher.EndTestRunAsync(testRunData, testRun.Id, true, _executionContext.CancellationToken);

            // If any results failed and the flaky check feature flag is enabled, re-evaluate the
            // run outcome with flaky-test awareness.
            bool isFlakyCheckEnabled = _featureFlagService.GetFeatureFlagState(TestResultsConstants.EnableFlakyCheckInAgentFeatureFlag, TestResultsConstants.TCMServiceInstanceGuid);
            if (isTestRunOutcomeFailed && isFlakyCheckEnabled)
            {
                IList<TestRun> publishedRuns = new List<TestRun> { updatedRun };
                var runOutcome = _testRunPublisherHelper.CheckRunsForFlaky(publishedRuns, _projectName);
                if (runOutcome.HasValue)
                {
                    isTestRunOutcomeFailed = runOutcome.Value;
                }
            }
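
            // Persist the aggregated test run summary in an environment variable for later consumers.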
            StoreTestRunSummaryInEnvVar(testRunSummary);
        }
    }
    catch (Exception ex) when (!(ex is OperationCanceledException && _executionContext.CancellationToken.IsCancellationRequested))
    {
        // Let OperationCanceledException propagate while the pipeline itself is being cancelled,
        // so that cancelling the pipeline also cancels this command. Any other failure is logged
        // as a warning and does not fail the task.
        LogPublishTestResultsFailureWarning(ex);
    }

    return isTestRunOutcomeFailed;
}