Error out early if invalid test specs are provided

Martin Hořeňovský 2021-12-12 15:18:50 +01:00
parent 3bfe900bbc
commit 08939cc8bb
3 changed files with 42 additions and 22 deletions

View File

@@ -197,6 +197,7 @@ new design.
 * Catch2's pkg-config integration also provides 2 packages
   * `catch2` is the statically compiled implementation by itself
   * `catch2-with-main` also links in the default main
+* Invalid test specifications passed to Catch2 are now reported before tests are run, and are a hard error.
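In practice this means an embedding program sees a non-zero return from Catch::Session::run without any tests executing. A minimal sketch of that observable behaviour, not part of the commit itself: it reuses the invalid spec "[aa,a]" and the "Invalid Filter:" output from the new CTest case further below, and the binary name SelfTest is just a placeholder.

// Sketch: what a caller observes after this commit.
#include <catch2/catch_session.hpp>

int main() {
    Catch::Session session;
    // "[aa,a]" is rejected by the test-spec parser, so it ends up in
    // testSpec().getInvalidArgs() (see the new CTest case below).
    char const* argv[] = { "SelfTest", "[aa,a]" };
    int rc = session.run( 2, argv );
    // After this commit: the reporter prints "Invalid Filter: [aa,a]"
    // and run() returns 1 without executing any tests.
    return rc;
}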

View File

@@ -25,6 +25,7 @@
 #include <catch2/internal/catch_move_and_forward.hpp>
 #include <algorithm>
+#include <cassert>
 #include <iomanip>
 #include <set>
@@ -61,24 +62,30 @@ namespace Catch {
             m_config{config},
             m_context{config, CATCH_MOVE(reporter)} {
-                auto const& allTestCases = getAllTestCasesSorted(*m_config);
-                m_matches = m_config->testSpec().matchesByFilter(allTestCases, *m_config);
-                auto const& invalidArgs = m_config->testSpec().getInvalidArgs();
-                if (m_matches.empty() && invalidArgs.empty()) {
-                    for (auto const& test : allTestCases)
-                        if (!test.getTestCaseInfo().isHidden())
-                            m_tests.emplace(&test);
+                assert( m_config->testSpec().getInvalidArgs().empty() &&
+                        "Invalid test specs should be handled before running tests" );
+
+                auto const& allTestCases = getAllTestCasesSorted(*m_config);
+                auto const& testSpec = m_config->testSpec();
+                if ( !testSpec.hasFilters() ) {
+                    for ( auto const& test : allTestCases ) {
+                        if ( !test.getTestCaseInfo().isHidden() ) {
+                            m_tests.emplace( &test );
+                        }
+                    }
                 } else {
-                    for (auto const& match : m_matches)
-                        m_tests.insert(match.tests.begin(), match.tests.end());
+                    m_matches =
+                        testSpec.matchesByFilter( allTestCases, *m_config );
+                    for ( auto const& match : m_matches ) {
+                        m_tests.insert( match.tests.begin(),
+                                        match.tests.end() );
+                    }
                 }
 
                 m_tests = createShard(m_tests, m_config->shardCount(), m_config->shardIndex());
             }
 
             Totals execute() {
-                auto const& invalidArgs = m_config->testSpec().getInvalidArgs();
                 Totals totals;
                 for (auto const& testCase : m_tests) {
                     if (!m_context.aborting())
@@ -94,11 +101,6 @@ namespace Catch {
                     }
                 }
 
-                if (!invalidArgs.empty()) {
-                    for (auto const& invalidArg: invalidArgs)
-                        m_reporter->reportInvalidArguments(invalidArg);
-                }
-
                 return totals;
             }
@@ -284,6 +286,15 @@ namespace Catch {
             // Create reporter(s) so we can route listings through them
             auto reporter = makeReporter(m_config.get());
 
+            auto const& invalidSpecs = m_config->testSpec().getInvalidArgs();
+            if ( !invalidSpecs.empty() ) {
+                for ( auto const& spec : invalidSpecs ) {
+                    reporter->reportInvalidArguments( spec );
+                }
+                return 1;
+            }
+
             // Handle list request
             if (list(*reporter, *m_config)) {
                 return 0;
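Taken together, the changes to this file follow a validate-at-the-boundary pattern: the entry point (the code around makeReporter) rejects invalid specs and returns 1, while the test-running layer no longer re-checks and merely asserts the invariant. A reduced, hypothetical sketch of that shape; the Spec, run, and runTests names are invented for illustration and are not Catch2 API.

#include <cassert>
#include <cstdio>
#include <string>
#include <vector>

// Invented stand-in for the parsed test spec and its invalid arguments.
struct Spec { std::vector<std::string> invalid; };

// Plays TestGroup's role: relies on the invariant instead of re-checking it.
void runTests( Spec const& spec ) {
    assert( spec.invalid.empty() &&
            "Invalid test specs should be handled before running tests" );
    // ... select and run tests ...
}

// Plays the session entry point's role: validates once, up front.
int run( Spec const& spec ) {
    if ( !spec.invalid.empty() ) {
        for ( auto const& arg : spec.invalid ) {
            std::fprintf( stderr, "Invalid Filter: %s\n", arg.c_str() );
        }
        return 1; // hard error: nothing runs
    }
    runTests( spec );
    return 0;
}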

View File

@@ -294,14 +294,12 @@ set_tests_properties(TestsInFile::SimpleSpecs PROPERTIES PASS_REGULAR_EXPRESSION
 add_test(NAME TestsInFile::EscapeSpecialCharacters COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/special-characters-in-file.input")
 set_tests_properties(TestsInFile::EscapeSpecialCharacters PROPERTIES PASS_REGULAR_EXPRESSION "1 assertion in 1 test case")
 
-# CTest does not allow us to create an AND of required regular expressions,
-# so we have to split the test into 2 parts and look for parts of the expected
-# output separately.
 add_test(NAME TestsInFile::InvalidTestNames-1 COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/invalid-test-names.input")
-set_tests_properties(TestsInFile::InvalidTestNames-1 PROPERTIES PASS_REGULAR_EXPRESSION "Invalid Filter: \"Test with special, characters in \\\\\" name\"")
-
-add_test(NAME TestsInFile::InvalidTestNames-2 COMMAND $<TARGET_FILE:SelfTest> "-f ${SELF_TEST_DIR}/Misc/invalid-test-names.input")
-set_tests_properties(TestsInFile::InvalidTestNames-2 PROPERTIES PASS_REGULAR_EXPRESSION "No tests ran")
+set_tests_properties(TestsInFile::InvalidTestNames-1
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "Invalid Filter: \"Test with special, characters in \\\\\" name\""
+    FAIL_REGULAR_EXPRESSION "No tests ran"
+)
 
 add_test(NAME TagAlias COMMAND $<TARGET_FILE:SelfTest> [@tricky] --list-tests)
 set_tests_properties(TagAlias PROPERTIES
@@ -371,6 +369,16 @@ set_tests_properties("Benchmarking::FailureReporting::ShouldFailIsRespected"
     PASS_REGULAR_EXPRESSION "1 failed as expected"
 )
 
+add_test(NAME "ErrorHandling::InvalidTestSpecExitsEarly"
+  COMMAND
+  $<TARGET_FILE:SelfTest> "[aa,a]"
+)
+set_tests_properties("ErrorHandling::InvalidTestSpecExitsEarly"
+  PROPERTIES
+    PASS_REGULAR_EXPRESSION "Invalid Filter: \\[aa\,a\\]"
+    FAIL_REGULAR_EXPRESSION "No tests ran"
+)
+
 if (CATCH_USE_VALGRIND)
   add_test(NAME ValgrindRunTests COMMAND valgrind --leak-check=full --error-exitcode=1 $<TARGET_FILE:SelfTest>)