in src/include/doctest.h [6519:6776]
int Context::run() {
using namespace detail;
// save the old context state in case one was already set up - for using asserts outside of a testing context
auto old_cs = g_cs;
// this is now the current context
g_cs = p;
is_running_in_test = true;
g_no_colors = p->no_colors;
p->resetRunData();
std::fstream fstr;
if(p->cout == nullptr) {
if(p->quiet) {
p->cout = &discardOut;
} else if(p->out.size()) {
// to a file if specified
fstr.open(p->out.c_str(), std::fstream::out);
p->cout = &fstr;
} else {
// stdout by default
p->cout = &std::cout;
}
}
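// allocate the alternate stack used by the fatal condition (signal) handlers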
FatalConditionHandler::allocateAltStackMem();
auto cleanup_and_return = [&]() {
FatalConditionHandler::freeAltStackMem();
if(fstr.is_open())
fstr.close();
// restore context
g_cs = old_cs;
is_running_in_test = false;
// we have to free the reporters which were allocated when the run started
for(auto& curr : p->reporters_currently_used)
delete curr;
p->reporters_currently_used.clear();
if(p->numTestCasesFailed && !p->no_exitcode)
return EXIT_FAILURE;
return EXIT_SUCCESS;
};
// setup default reporter if none is given through the command line
if(p->filters[8].empty())
p->filters[8].push_back("console");
// check to see if any of the registered reporters has been selected
for(auto& curr : getReporters()) {
if(matchesAny(curr.first.second.c_str(), p->filters[8], false, p->case_sensitive))
p->reporters_currently_used.push_back(curr.second(*g_cs));
}
// TODO: check if there is nothing in reporters_currently_used
// prepend all listeners so they receive events before any of the reporters
for(auto& curr : getListeners())
p->reporters_currently_used.insert(p->reporters_currently_used.begin(), curr.second(*g_cs));
#ifdef DOCTEST_PLATFORM_WINDOWS
if(isDebuggerActive() && p->no_debug_output == false)
p->reporters_currently_used.push_back(new DebugOutputWindowReporter(*g_cs));
#endif // DOCTEST_PLATFORM_WINDOWS
// handle version, help and no_run
if(p->no_run || p->version || p->help || p->list_reporters) {
DOCTEST_ITERATE_THROUGH_REPORTERS(report_query, QueryData());
return cleanup_and_return();
}
std::vector<const TestCase*> testArray;
for(auto& curr : getRegisteredTests())
testArray.push_back(&curr);
p->numTestCases = testArray.size();
// sort the collected records
if(!testArray.empty()) {
if(p->order_by.compare("file", true) == 0) {
std::sort(testArray.begin(), testArray.end(), fileOrderComparator);
} else if(p->order_by.compare("suite", true) == 0) {
std::sort(testArray.begin(), testArray.end(), suiteOrderComparator);
} else if(p->order_by.compare("name", true) == 0) {
std::sort(testArray.begin(), testArray.end(), nameOrderComparator);
} else if(p->order_by.compare("rand", true) == 0) {
std::srand(p->rand_seed);
// in-place Fisher-Yates shuffle using std::rand() (seeded above with the user-provided seed)
const auto first = &testArray[0];
for(size_t i = testArray.size() - 1; i > 0; --i) {
int idxToSwap = std::rand() % (i + 1); // NOLINT
const auto temp = first[i];
first[i] = first[idxToSwap];
first[idxToSwap] = temp;
}
} else if(p->order_by.compare("none", true) == 0) {
// means no sorting - beneficial for death tests which call into the executable
// with a specific test case in mind - we don't want to slow down the startup times
}
}
std::set<String> testSuitesPassingFilt;
bool query_mode = p->count || p->list_test_cases || p->list_test_suites;
std::vector<const TestCaseData*> queryResults;
if(!query_mode)
DOCTEST_ITERATE_THROUGH_REPORTERS(test_run_start, DOCTEST_EMPTY);
// invoke the registered functions if they match the filter criteria (or just count them)
for(auto& curr : testArray) {
const auto& tc = *curr;
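// apply the command line filters - [0]/[1]: source file include/exclude,
// [2]/[3]: test suite include/exclude, [4]/[5]: test name include/exclude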
bool skip_me = false;
if(tc.m_skip && !p->no_skip)
skip_me = true;
if(!matchesAny(tc.m_file.c_str(), p->filters[0], true, p->case_sensitive))
skip_me = true;
if(matchesAny(tc.m_file.c_str(), p->filters[1], false, p->case_sensitive))
skip_me = true;
if(!matchesAny(tc.m_test_suite, p->filters[2], true, p->case_sensitive))
skip_me = true;
if(matchesAny(tc.m_test_suite, p->filters[3], false, p->case_sensitive))
skip_me = true;
if(!matchesAny(tc.m_name, p->filters[4], true, p->case_sensitive))
skip_me = true;
if(matchesAny(tc.m_name, p->filters[5], false, p->case_sensitive))
skip_me = true;
if(!skip_me)
p->numTestCasesPassingFilters++;
// skip the test if it is outside the execution range (p->first / p->last) over the filter-passing tests
if((p->last < p->numTestCasesPassingFilters && p->first <= p->last) ||
(p->first > p->numTestCasesPassingFilters))
skip_me = true;
if(skip_me) {
if(!query_mode)
DOCTEST_ITERATE_THROUGH_REPORTERS(test_case_skipped, tc);
continue;
}
// do not execute the test if we are to only count the number of filter passing tests
if(p->count)
continue;
// queue the test case for the query reporters and don't execute it
if(p->list_test_cases) {
queryResults.push_back(&tc);
continue;
}
// queue the test suite for the query reporters if not done already and don't execute it
if(p->list_test_suites) {
if((testSuitesPassingFilt.count(tc.m_test_suite) == 0) && tc.m_test_suite[0] != '\0') {
queryResults.push_back(&tc);
testSuitesPassingFilt.insert(tc.m_test_suite);
p->numTestSuitesPassingFilters++;
}
continue;
}
// execute the test if it passes all the filtering
{
p->currentTest = &tc;
p->failure_flags = TestCaseFailureReason::None;
p->seconds = 0;
// reset atomic counters
p->numAssertsFailedCurrentTest_atomic = 0;
p->numAssertsCurrentTest_atomic = 0;
p->subcasesPassed.clear();
DOCTEST_ITERATE_THROUGH_REPORTERS(test_case_start, tc);
p->timer.start();
bool run_test = true;
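// keep re-entering the test body until all subcase paths have been exhausted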
do {
// reset some of the fields for subcases (except for the set of fully passed ones)
p->should_reenter = false;
p->subcasesCurrentMaxLevel = 0;
p->subcasesStack.clear();
p->shouldLogCurrentException = true;
// reset stuff for logging with INFO()
p->stringifiedContexts.clear();
#ifndef DOCTEST_CONFIG_NO_EXCEPTIONS
try {
#endif // DOCTEST_CONFIG_NO_EXCEPTIONS
// MSVC 2015 diagnoses fatalConditionHandler as unused (because reset() is a static method)
DOCTEST_MSVC_SUPPRESS_WARNING_WITH_PUSH(4101) // unreferenced local variable
FatalConditionHandler fatalConditionHandler; // Handle signals
// execute the test
tc.m_test();
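// restore the previous handlers - the test body returned without a fatal error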
fatalConditionHandler.reset();
DOCTEST_MSVC_SUPPRESS_WARNING_POP
#ifndef DOCTEST_CONFIG_NO_EXCEPTIONS
} catch(const TestFailureException&) {
p->failure_flags |= TestCaseFailureReason::AssertFailure;
} catch(...) {
DOCTEST_ITERATE_THROUGH_REPORTERS(test_case_exception,
{translateActiveException(), false});
p->failure_flags |= TestCaseFailureReason::Exception;
}
#endif // DOCTEST_CONFIG_NO_EXCEPTIONS
// exit this loop if enough assertions have failed - even if there are more subcases
if(p->abort_after > 0 &&
p->numAssertsFailed + p->numAssertsFailedCurrentTest_atomic >= p->abort_after) {
run_test = false;
p->failure_flags |= TestCaseFailureReason::TooManyFailedAsserts;
}
if(p->should_reenter && run_test)
DOCTEST_ITERATE_THROUGH_REPORTERS(test_case_reenter, tc);
if(!p->should_reenter)
run_test = false;
} while(run_test);
p->finalizeTestCaseData();
DOCTEST_ITERATE_THROUGH_REPORTERS(test_case_end, *g_cs);
p->currentTest = nullptr;
// stop executing tests if enough assertions have failed
if(p->abort_after > 0 && p->numAssertsFailed >= p->abort_after)
break;
}
}
if(!query_mode) {
DOCTEST_ITERATE_THROUGH_REPORTERS(test_run_end, *g_cs);
} else {
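// hand the collected query results (counts / listed test cases / suites) to the reporters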
QueryData qdata;
qdata.run_stats = g_cs;
qdata.data = queryResults.data();
qdata.num_data = unsigned(queryResults.size());
DOCTEST_ITERATE_THROUGH_REPORTERS(report_query, qdata);
}
return cleanup_and_return();
}