in src/vstest.console/Internal/ConsoleLogger.cs [678:901]
private void TestRunCompleteHandler(object sender, TestRunCompleteEventArgs e)
{
    // Stop the progress indicator as we are about to print the summary.
    _progressIndicator?.Stop();

    var passedTests = 0;
    var failedTests = 0;
    var skippedTests = 0;
    var totalTests = 0;
    Output.WriteLine(string.Empty, OutputLevel.Information);
    // Print run-level attachments, if any.
    var runLevelAttachmentCount = (e.AttachmentSets == null) ? 0 : e.AttachmentSets.Sum(attachmentSet => attachmentSet.Attachments.Count);
    if (runLevelAttachmentCount > 0)
    {
        Output.Information(false, CommandLineResources.AttachmentsBanner);
        foreach (var attachmentSet in e.AttachmentSets)
        {
            foreach (var uriDataAttachment in attachmentSet.Attachments)
            {
                var attachmentOutput = string.Format(CultureInfo.CurrentCulture, CommandLineResources.AttachmentOutputFormat, uriDataAttachment.Uri.LocalPath);
                Output.Information(false, attachmentOutput);
            }
        }
    }
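
    // Group the leaf test results by their source so that each test assembly gets its own summary.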
    var leafTestResultsPerSource = LeafTestResults.Select(p => p.Value).GroupBy(r => r.TestCase.Source);
    foreach (var sd in leafTestResultsPerSource)
    {
        var source = sd.Key;
        var sourceSummary = new SourceSummary();

        var results = sd.ToArray();
        // The duration of the whole source is the span between the first test to start and the last test to end.
        sourceSummary.Duration = !results.Any() ? TimeSpan.Zero : results.Max(r => r.EndTime) - results.Min(r => r.StartTime);
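
        // Tally the per-source counts; only Passed, Failed, and Skipped outcomes contribute to the totals.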
        foreach (var result in results)
        {
            switch (result.Outcome)
            {
                case TestOutcome.Passed:
                    sourceSummary.TotalTests++;
                    sourceSummary.PassedTests++;
                    break;

                case TestOutcome.Failed:
                    sourceSummary.TotalTests++;
                    sourceSummary.FailedTests++;
                    break;

                case TestOutcome.Skipped:
                    sourceSummary.TotalTests++;
                    sourceSummary.SkippedTests++;
                    break;

                default:
                    break;
            }
        }
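
        // In quiet or minimal verbosity, emit a single one-line summary per source instead of the detailed output.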
        if (VerbosityLevel == Verbosity.Quiet || VerbosityLevel == Verbosity.Minimal)
        {
            TestOutcome sourceOutcome = TestOutcome.None;
            if (sourceSummary.FailedTests > 0)
            {
                sourceOutcome = TestOutcome.Failed;
            }
            else if (sourceSummary.PassedTests > 0)
            {
                sourceOutcome = TestOutcome.Passed;
            }
            else if (sourceSummary.SkippedTests > 0)
            {
                sourceOutcome = TestOutcome.Skipped;
            }
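
            // Pad the outcome indicator and the counts so that summary lines from multiple sources align in columns.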
            string resultString;
            switch (sourceOutcome)
            {
                case TestOutcome.Failed:
                    resultString = (CommandLineResources.FailedTestIndicator + "!").PadRight(LongestResultIndicator);
                    break;

                case TestOutcome.Passed:
                    resultString = (CommandLineResources.PassedTestIndicator + "!").PadRight(LongestResultIndicator);
                    break;

                case TestOutcome.Skipped:
                    resultString = (CommandLineResources.SkippedTestIndicator + "!").PadRight(LongestResultIndicator);
                    break;

                default:
                    resultString = CommandLineResources.None.PadRight(LongestResultIndicator);
                    break;
            }

            var failed = sourceSummary.FailedTests.ToString().PadLeft(5);
            var passed = sourceSummary.PassedTests.ToString().PadLeft(5);
            var skipped = sourceSummary.SkippedTests.ToString().PadLeft(5);
            var total = sourceSummary.TotalTests.ToString().PadLeft(5);

            var frameworkString = string.IsNullOrEmpty(_targetFramework)
                ? string.Empty
                : $"({_targetFramework})";

            var duration = GetFormattedDurationString(sourceSummary.Duration);
            var sourceName = sd.Key.Split('\\').Last();

            var outputLine = string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummary,
                resultString,
                failed,
                passed,
                skipped,
                total,
                duration,
                sourceName,
                frameworkString);
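
            // Color the summary line by outcome: red for failed, green for passed, yellow for skipped.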
            ConsoleColor? color = null;
            if (sourceOutcome == TestOutcome.Failed)
            {
                color = ConsoleColor.Red;
            }
            else if (sourceOutcome == TestOutcome.Passed)
            {
                color = ConsoleColor.Green;
            }
            else if (sourceOutcome == TestOutcome.Skipped)
            {
                color = ConsoleColor.Yellow;
            }

            if (color != null)
            {
                Output.Write(outputLine, OutputLevel.Information, color.Value);
            }
            else
            {
                Output.Write(outputLine, OutputLevel.Information);
            }

            Output.Information(false, CommandLineResources.TestRunSummaryAssemblyAndFramework,
                sourceName,
                frameworkString);
        }

        passedTests += sourceSummary.PassedTests;
        failedTests += sourceSummary.FailedTests;
        skippedTests += sourceSummary.SkippedTests;
        totalTests += sourceSummary.TotalTests;
    }
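
    // In quiet or minimal verbosity, only report cancellation or abort and skip the run-level summary below.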
    if (VerbosityLevel == Verbosity.Quiet || VerbosityLevel == Verbosity.Minimal)
    {
        if (e.IsCanceled)
        {
            Output.Error(false, CommandLineResources.TestRunCanceled);
        }
        else if (e.IsAborted)
        {
            if (e.Error == null)
            {
                Output.Error(false, CommandLineResources.TestRunAborted);
            }
            else
            {
                Output.Error(false, CommandLineResources.TestRunAbortedWithError, e.Error);
            }
        }

        return;
    }
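
    // At higher verbosity, print the overall run outcome before the aggregate counts.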
    if (e.IsCanceled)
    {
        Output.Error(false, CommandLineResources.TestRunCanceled);
    }
    else if (e.IsAborted)
    {
        if (e.Error == null)
        {
            Output.Error(false, CommandLineResources.TestRunAborted);
        }
        else
        {
            Output.Error(false, CommandLineResources.TestRunAbortedWithError, e.Error);
        }
    }
    else if (failedTests > 0 || _testRunHasErrorMessages)
    {
        Output.Error(false, CommandLineResources.TestRunFailed);
    }
    else if (totalTests > 0)
    {
        Output.Information(false, ConsoleColor.Green, CommandLineResources.TestRunSuccessful);
    }

    // Output a summary of the aggregate counts across all sources.
    if (totalTests > 0)
    {
        string totalTestsFormat = (e.IsAborted || e.IsCanceled) ? CommandLineResources.TestRunSummaryForCanceledOrAbortedRun : CommandLineResources.TestRunSummaryTotalTests;
        Output.Information(false, string.Format(CultureInfo.CurrentCulture, totalTestsFormat, totalTests));

        if (passedTests > 0)
        {
            Output.Information(false, ConsoleColor.Green, string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummaryPassedTests, passedTests));
        }

        if (failedTests > 0)
        {
            Output.Information(false, ConsoleColor.Red, string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummaryFailedTests, failedTests));
        }

        if (skippedTests > 0)
        {
            Output.Information(false, ConsoleColor.Yellow, string.Format(CultureInfo.CurrentCulture, CommandLineResources.TestRunSummarySkippedTests, skippedTests));
        }
    }
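
    // Skip printing the elapsed time when it is zero; per the trace message, that typically means the run hit errors.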
    if (totalTests > 0)
    {
        if (e.ElapsedTimeInRunningTests.Equals(TimeSpan.Zero))
        {
            EqtTrace.Info("Skipped printing test execution time on console because it looks like the test run had faced some errors");
        }
        else
        {
            PrintTimeSpan(e.ElapsedTimeInRunningTests);
        }
    }
}